diff --git a/.editorconfig b/.editorconfig index 6bbba23b6e21..6ac165fc1147 100644 --- a/.editorconfig +++ b/.editorconfig @@ -10,6 +10,7 @@ insert_final_newline = true [*.java] indent_size = 4 +ij_continuation_indent_size = 8 trim_trailing_whitespace = true [*.{js,jsx}] diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 6fc18d492a1c..000000000000 --- a/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,34 +0,0 @@ - - -## Expected Behavior - - - -## Current Behavior - - - -## Possible Solution - - - -## Steps to Reproduce (for bugs) - - -1. -2. -3. -4. - -## Context - - - -## Your Environment - - -* Graylog Version: -* Elasticsearch Version: -* MongoDB Version: -* Operating System: -* Browser version: diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000000..47e44ba8d50e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,44 @@ +--- +name: Bug report / change request +about: Create a report to help us improve. This includes bugs but also changing the behavior of the product. +title: '' +labels: bug +assignees: '' + +--- + + + +## Expected Behavior + + + +## Current Behavior + + + +## Possible Solution + + + +## Steps to Reproduce (for bugs) + + +1. +2. +3. +4. + +## Context + + + +## Your Environment + + +* Graylog Version: +* Java Version: +* OpenSearch Version: +* MongoDB Version: +* Operating System: +* Browser version: diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000000..f89423a79e78 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,28 @@ +--- +name: Feature request +about: Suggest an idea for this project about a missing feature +title: '' +labels: feature +assignees: '' + +--- + +## What? + + + +## Why? + + + + + + +## Your Environment + + +* Graylog Version: +* OpenSearch Version: +* MongoDB Version: +* Operating System: +* Browser version: diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 4f9ceac420a2..54943c15776a 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -18,6 +18,7 @@ - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) +- [ ] Refactoring (non-breaking change) - [ ] Breaking change (fix or feature that would cause existing functionality to change) ## Checklist: @@ -28,4 +29,4 @@ - [ ] I have updated the documentation accordingly. - [ ] I have read the **CONTRIBUTING** document. - [ ] I have added tests to cover my changes. -- [ ] All new and existing tests passed. + diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000000..faf65cd874b1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,140 @@ +version: 2 +updates: + - package-ecosystem: maven + directory: "/" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + ignore: + # The AWS SDKs receive patch updates almost every day. Reduce dependabot + # pull-request noise by ignoring patch updates. 
+ - dependency-name: "software.amazon.awssdk:bom" + update-types: ["version-update:semver-patch"] + - dependency-name: "com.amazonaws:aws-java-sdk-bom" + update-types: ["version-update:semver-patch"] + # Lucene >=10 requires JDK 21 + - dependency-name: "org.apache.lucene:lucene-queryparser" + versions: + - "> 9" + - dependency-name: "org.apache.lucene:lucene-analysis-common" + versions: + - "> 9" + open-pull-requests-limit: 25 + labels: + - dependencies + - maven + rebase-strategy: "disabled" + + - package-ecosystem: npm + directory: "/graylog2-web-interface/packages/graylog-web-plugin" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + ignore: + - dependency-name: react-bootstrap + versions: + - ">= 1.a" + - "< 2" + rebase-strategy: "disabled" + + - package-ecosystem: npm + directory: "/graylog2-web-interface" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + ignore: + - dependency-name: bootstrap + versions: + - ">= 4.a" + - "< 5" + rebase-strategy: "disabled" + groups: + mantine: + patterns: + - "@mantine/*" + openfonts: + patterns: + - "@openfonts/*" + tanstack: + patterns: + - "@tanstack/*" + + - package-ecosystem: npm + directory: "/graylog2-web-interface/packages/babel-preset-graylog" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + rebase-strategy: "disabled" + groups: + babel: + patterns: + - "@babel/*" + + - package-ecosystem: npm + directory: "/graylog2-web-interface/packages/eslint-config-graylog" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + rebase-strategy: "disabled" + groups: + typescript-eslint: + patterns: + - "@typescript-eslint/*" + + - package-ecosystem: npm + directory: "/graylog2-web-interface/packages/jest-preset-graylog" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + rebase-strategy: "disabled" + groups: + jest: + patterns: + - "babel-jest" + - "jest" + - "jest-environment-*" + - "@jest/types" + + - package-ecosystem: npm + directory: "/graylog2-web-interface/packages/stylelint-config-graylog" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + rebase-strategy: "disabled" + + - package-ecosystem: npm + directory: "/graylog2-web-interface/docs" + schedule: + interval: daily + time: "02:00" + timezone: "Europe/Berlin" + open-pull-requests-limit: 10 + labels: + - dependencies + rebase-strategy: "disabled" + diff --git a/.github/scripts/web-code-quality.sh b/.github/scripts/web-code-quality.sh new file mode 100755 index 000000000000..5d70af3fa046 --- /dev/null +++ b/.github/scripts/web-code-quality.sh @@ -0,0 +1,37 @@ +#!/bin/sh + +PROBLEM_COUNT=`jq '. | map(.errorCount + .warningCount)|add' /tmp/report.json` +PROBLEM_COUNT_STYLELINT=`jq '. 
| map(.warnings | length)|add' /tmp/report-stylelint.json` +CURRENT_REF=`git rev-parse HEAD` +TIMESTAMP=`git show --format=%at $CURRENT_REF|head -1` +CRC_USAGES=`grep -lr createReactClass src|wc -l` +REFLUX_USAGES=`grep -lr Reflux src|wc -l` + +JS_FILES=`find src -name \*.js -o -name \*.jsx|wc -l` +TS_FILES=`find src -name \*.ts -o -name \*.tsx|wc -l` + +ENZYME_TESTS=`grep -lr wrappedEnzyme src|wc -l` +TESTING_LIBRARY_TESTS=`grep -lr wrappedTestingLibrary src|wc -l` + +PAYLOAD=$(cat <<- EOF + { + "version": "1.1", + "host": "developermetrics", + "job": "fix-linter-hints", + "short_message": "Found ${PROBLEM_COUNT} ESLint and ${PROBLEM_COUNT_STYLELINT} Stylelint problems in commit ${CURRENT_REF}", + "_problems": ${PROBLEM_COUNT}, + "_problems_stylelint": ${PROBLEM_COUNT_STYLELINT}, + "_reflux_usages": ${REFLUX_USAGES}, + "_create_react_class_usages": ${CRC_USAGES}, + "_current_ref": "${CURRENT_REF}", + "_js_files": ${JS_FILES}, + "_ts_files": ${TS_FILES}, + "_enzyme_tests": ${ENZYME_TESTS}, + "_testing_library_tests": ${TESTING_LIBRARY_TESTS}, + "timestamp": ${TIMESTAMP} + } +EOF +) + +echo $PAYLOAD|jq . + diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000000..171a0e41fd85 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,32 @@ +name: Build the repository + +on: + pull_request: + +# Cancel running build when new ref gets pushed. +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest-4-cores-public + strategy: + matrix: + java-version: [ 17 ] + steps: + - uses: actions/checkout@v4 + - name: Set up JDK ${{ matrix.java-version }} + uses: actions/setup-java@v3 + with: + java-version: ${{ matrix.java-version }} + distribution: temurin + - name: License check + run: ./mvnw -B --fail-fast license:check + - name: Build with Maven + run: ./mvnw -B --fail-fast -Pedantic -Dspotbugs.skip -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 verify javadoc:javadoc + env: + JAVA_OPTS: -Xmx6G + TIMEOUT_MULTIPLIER: 2.0 + + diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml new file mode 100644 index 000000000000..62fa478bb56c --- /dev/null +++ b/.github/workflows/changelog.yml @@ -0,0 +1,17 @@ +name: "Changelog Check" + +on: + pull_request: + types: + - "opened" + - "synchronize" + - "reopened" + - "edited" + +jobs: + test: + name: "Check Changelog presence" + runs-on: "ubuntu-latest" + + steps: + - uses: "Graylog2/actions/changelog-check@main" diff --git a/.github/workflows/dispatch-main-build.yml b/.github/workflows/dispatch-main-build.yml new file mode 100644 index 000000000000..3bc157ab7aaa --- /dev/null +++ b/.github/workflows/dispatch-main-build.yml @@ -0,0 +1,29 @@ +name: Request a dispatched main branch build + +on: + push: + branches: [ master, main, "[0-9]+.[0-9]+" ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + dispatchedMainBuild: + name: Dispatch build + runs-on: ubuntu-latest + + steps: + # TODO remove later + - name: Debug event + run: jq . 
"$GITHUB_EVENT_PATH" + + - name: Dispatch build to graylog-project-internal + run: > + gh workflow run -R Graylog2/graylog-project-internal main-build.yml --ref master + -f caller_repo=${{ github.repository }} + -f caller_branch=${{ github.base_ref || github.ref_name }} + -f head_sha=${{ github.sha }} + -f initial_actor="${{ github.actor }}/${{ github.triggering_actor }}" + env: + GITHUB_TOKEN: ${{ secrets.PAT_GRAYLOG_PROJECT_INTERNAL_WORKFLOW_RW }} diff --git a/.github/workflows/dispatch-pr-build.yml b/.github/workflows/dispatch-pr-build.yml new file mode 100644 index 000000000000..da61dd83ec68 --- /dev/null +++ b/.github/workflows/dispatch-pr-build.yml @@ -0,0 +1,52 @@ +name: Request dispatched PR Build + +on: + pull_request: + types: [ opened, reopened, synchronize, edited ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + dispatchedPR: + name: Dispatch wait and check + runs-on: ubuntu-latest + + steps: + - name: Check if PR edit changed deps string + if: ${{ github.event.action == 'edited' }} + id: pr-string-changed + continue-on-error: true + run: | + old_pr_string=$(grep -P '^/(jenkins-pr-deps|jpd|prd)' <<< "$OLD_PR_BODY" | \ + grep -ioP '(Graylog2/\S+?#|https?://github.com/Graylog2/\S+?/pull/)[0-9]+' || true) + new_pr_string=$(grep -P '^/(jenkins-pr-deps|jpd|prd)' <<< "$NEW_PR_BODY" | \ + grep -ioP '(Graylog2/\S+?#|https?://github.com/Graylog2/\S+?/pull/)[0-9]+' || true) + if [ "$old_pr_string" != "$new_pr_string" ]; then + echo "PR deps string change detected: \"$old_pr_string\" -> \"$new_pr_string\"" + echo "Re-triggering PR build..." + exit 0 + fi + exit 1 + env: + OLD_PR_BODY: "${{ github.event.changes.body.from }}" + NEW_PR_BODY: "${{ github.event.pull_request.body }}" + + - name: Dispatch job to graylog-project-internal + if: ${{ github.event.action != 'edited' || steps.pr-string-changed.outcome == 'success' }} + run: > + gh workflow run -R Graylog2/graylog-project-internal pr-build.yml --ref master + -f caller_repo=${{ github.repository }} + -f caller_pr_nr=${{ github.event.number }} + -f caller_base_branch=${{ github.base_ref || github.ref_name }} + -f caller_head_branch=${{ github.head_ref }} + -f head_sha=${{ github.event.pull_request.head.sha }} + -f initial_actor="${{ github.actor }}/${{ github.triggering_actor }}" + env: + GITHUB_TOKEN: ${{ secrets.PAT_GRAYLOG_PROJECT_INTERNAL_WORKFLOW_RW }} + + - name: Give dispatched build time to add its status + run: sleep 20 + # This is mostly cosmetic. If this workflow finishes before the dispatched + # build creates the status on the PR, the build will show up green for a while. 
diff --git a/.github/workflows/fix-linter-hints.yml b/.github/workflows/fix-linter-hints.yml new file mode 100644 index 000000000000..2a3f8f093f51 --- /dev/null +++ b/.github/workflows/fix-linter-hints.yml @@ -0,0 +1,84 @@ +name: Fix linter hints + +on: + schedule: + - cron: '0 */8 * * MON-FRI' + + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + java-version: [ 17 ] + + steps: + - name: Freeing up more disk space + run: | + sudo rm -rf /usr/local/lib/android # will release about 10 GB if you don't need Android + sudo rm -rf /usr/share/dotnet # will release about 20GB if you don't need .NET + - uses: actions/checkout@v4 + - name: Set up JDK ${{ matrix.java-version }} + uses: actions/setup-java@v3 + with: + java-version: ${{ matrix.java-version }} + distribution: temurin + cache: maven + - name: Build with Maven + run: ./mvnw -B --fail-fast -Dspotbugs.skip -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 -DskipTests compile + env: + JAVA_OPTS: -Xmx6G + - name: Resetting lockfile + working-directory: graylog2-web-interface + run: git checkout yarn.lock + - name: Run lint --fix + continue-on-error: true + working-directory: graylog2-web-interface + run: yarn lint --fix -o /tmp/report.json -f json + - name: Run prettier + working-directory: graylog2-web-interface + run: yarn format + - name: Run lint:styles --fix + continue-on-error: true + working-directory: graylog2-web-interface + run: yarn lint:styles --fix -o /tmp/report-stylelint.json -f json + - name: Submit Results + continue-on-error: true + working-directory: graylog2-web-interface + run: | + /bin/sh ../.github/scripts/web-code-quality.sh | curl -X POST -H 'Content-Type: application/json' -d @- ${{ secrets.DEV_METRICS_URL }} + - name: Create/Update Pull Request + id: pr-create + uses: peter-evans/create-pull-request@b3a2c5d525a1eb75e75c781e45a1bf3bfc23bdc6 + with: + title: Fixing linter hints automatically + body: This PR was created by a job that is running periodically to find and fix linter hints. + author: Dr. Lint-a-lot + branch: fix/linter-hints + committer: Dr. Lint-a-lot + commit-message: Running lint --fix & prettier + delete-branch: true + + - name: Get headRef and SHA of PR + id: pr-infos + if: ${{ steps.pr-create.outputs.pull-request-number && steps.pr-create.outputs.pull-request-operation != 'closed' }} + run: | + HEADREF=$(gh pr view ${{ steps.pr-create.outputs.pull-request-number }} --json headRefName --template '{{.headRefName}}') + echo "sha=$(git rev-parse origin/$HEADREF)" >> $GITHUB_OUTPUT + echo "headref=$HEADREF" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + + - name: Request dispatched PR build + if: ${{ steps.pr-create.outputs.pull-request-number && steps.pr-create.outputs.pull-request-operation != 'closed' }} + run: > + gh workflow run -R Graylog2/graylog-project-internal pr-build.yml --ref master + -f caller_repo=${{ github.repository }} + -f caller_pr_nr=${{ steps.pr-create.outputs.pull-request-number }} + -f caller_base_branch=master + -f caller_head_branch=${{ steps.pr-infos.outputs.headref }} + -f head_sha=${{ steps.pr-infos.outputs.sha }} + -f initial_actor="Dr. 
Lint-a-lot" + env: + GITHUB_TOKEN: ${{ secrets.PAT_GRAYLOG_PROJECT_INTERNAL_WORKFLOW_RW }} diff --git a/.github/workflows/no-response.yml b/.github/workflows/no-response.yml new file mode 100644 index 000000000000..6946db3a99bb --- /dev/null +++ b/.github/workflows/no-response.yml @@ -0,0 +1,20 @@ +name: No Response + +# Both `issue_comment` and `scheduled` event types are required for this Action +# to work properly. +on: + issue_comment: + types: [created] + schedule: + # Schedule for five minutes after the hour, every hour + - cron: '5 * * * *' + +jobs: + noResponse: + runs-on: ubuntu-latest + steps: + - uses: lee-dohm/no-response@v0.5.0 + with: + token: ${{ github.token }} + responseRequiredLabel: needs-input + daysUntilClose: 30 diff --git a/.github/workflows/notify-upgrade-notes.yml b/.github/workflows/notify-upgrade-notes.yml new file mode 100644 index 000000000000..2e85d0903f0b --- /dev/null +++ b/.github/workflows/notify-upgrade-notes.yml @@ -0,0 +1,31 @@ +# This workflow sends a Slack notification to the configured channels +# when the UPGRADING.md file is updated. This is used by the docs team. + +name: "Notify UPGRADING.md update" +run-name: "Notify UPGRADING.md update (${{ github.ref_name}}/${{ github.sha }})" + +on: + workflow_dispatch: + push: + branches: + - "master" + - "[0-9].[0-9]" # Stable branches + paths: + - "UPGRADING.md" + +jobs: + notify: + runs-on: "ubuntu-latest" + + steps: + - name: "Send Slack message" + uses: "slackapi/slack-github-action@v1" + env: + SLACK_BOT_TOKEN: "${{ secrets.SLACK_BOT_TOKEN }}" + with: + channel-id: "${{ vars.SLACK_UPGRADE_NOTIFY_CHANNELS }}" + slack-message: | + The Graylog ${{ github.ref_name || 'test' }}/UPGRADING.md file was updated: + + Content: https://github.com/Graylog2/graylog2-server/blob/${{ github.sha || 'test' }}/UPGRADING.md + Commit: https://github.com/Graylog2/graylog2-server/commit/${{ github.sha || 'test' }} diff --git a/.github/workflows/reviewbot.yml b/.github/workflows/reviewbot.yml new file mode 100644 index 000000000000..f902e2402bc6 --- /dev/null +++ b/.github/workflows/reviewbot.yml @@ -0,0 +1,32 @@ +name: PR Review + +on: + pull_request: + paths: + - '**.[jt]sx?' + types: + - opened + - edited + - synchronize + +jobs: + linter: + name: Reviewbot + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up JDK + uses: actions/setup-java@v3 + with: + java-version: 17 + distribution: temurin + cache: maven + - name: Compile with Maven / Install dependencies + run: ./mvnw --fail-fast -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 compile + - name: Reviewbot + uses: Graylog2/reviewbot@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + prefix: graylog2-web-interface diff --git a/.github/workflows/update-browserslist-db.yml b/.github/workflows/update-browserslist-db.yml new file mode 100644 index 000000000000..e10b54a2a47c --- /dev/null +++ b/.github/workflows/update-browserslist-db.yml @@ -0,0 +1,56 @@ +name: Update browserslist DB. 
+ +on: + schedule: + - cron: '0 0 1/14 * MON-FRI' + + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + defaults: + run: + working-directory: graylog2-web-interface + + steps: + - uses: actions/checkout@v4 + - name: Install dependencies + run: yarn install + - name: Updating browserslist db + continue-on-error: true + run: npx browserslist@latest --update-db + - name: Create/Update Pull Request + id: pr-create + uses: peter-evans/create-pull-request@b3a2c5d525a1eb75e75c781e45a1bf3bfc23bdc6 + with: + title: Updating browserslist DB. + body: This PR was created by a job that is running periodically to update the browserslist db automatically. See [here](https://github.com/browserslist/browserslist#browsers-data-updating) for details. + author: Dr. Lint-a-lot + branch: update/browserslist-db + committer: Dr. Lint-a-lot + commit-message: Updating browserslist DB. + delete-branch: true + + - name: Get headRef and SHA of PR + id: pr-infos + if: ${{ steps.pr-create.outputs.pull-request-number && steps.pr-create.outputs.pull-request-operation != 'closed' }} + run: | + HEADREF=$(gh pr view ${{ steps.pr-create.outputs.pull-request-number }} --json headRefName --template '{{.headRefName}}') + echo "sha=$(git rev-parse origin/$HEADREF)" >> $GITHUB_OUTPUT + echo "headref=$HEADREF" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + + - name: Request dispatched PR build + if: ${{ steps.pr-create.outputs.pull-request-number && steps.pr-create.outputs.pull-request-operation != 'closed' }} + run: > + gh workflow run -R Graylog2/graylog-project-internal pr-build.yml --ref master + -f caller_repo=${{ github.repository }} + -f caller_pr_nr=${{ steps.pr-create.outputs.pull-request-number }} + -f caller_base_branch=master + -f caller_head_branch=${{ steps.pr-infos.outputs.headref }} + -f head_sha=${{ steps.pr-infos.outputs.sha }} + -f initial_actor="Dr. Lint-a-lot" + env: + GITHUB_TOKEN: ${{ secrets.PAT_GRAYLOG_PROJECT_INTERNAL_WORKFLOW_RW }} diff --git a/.github/workflows/updating-lockfile.yml b/.github/workflows/updating-lockfile.yml new file mode 100644 index 000000000000..ce93be0dd48f --- /dev/null +++ b/.github/workflows/updating-lockfile.yml @@ -0,0 +1,53 @@ +name: Updating yarn lockfile + +on: + schedule: + - cron: '0 */12 * * MON-FRI' + + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + defaults: + run: + working-directory: graylog2-web-interface + + steps: + - uses: actions/checkout@v3 + - name: Install dependencies + run: yarn install + - name: Create/Update Pull Request + id: pr-create + uses: peter-evans/create-pull-request@b3a2c5d525a1eb75e75c781e45a1bf3bfc23bdc6 + with: + title: Updating yarn lockfile + body: This PR was created by a job that is running periodically to update the yarn lockfile after transitive dependencies have been updated. 
+ author: Gary Bot + branch: update-yarn-lockfile + committer: Gary Bot + commit-message: Updating yarn lockfile + delete-branch: true + + - name: Get headRef and SHA of PR + id: pr-infos + if: ${{ steps.pr-create.outputs.pull-request-number && steps.pr-create.outputs.pull-request-operation != 'closed' }} + run: | + HEADREF=$(gh pr view ${{ steps.pr-create.outputs.pull-request-number }} --json headRefName --template '{{.headRefName}}') + echo "sha=$(git rev-parse origin/$HEADREF)" >> $GITHUB_OUTPUT + echo "headref=$HEADREF" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + + - name: Request dispatched PR build + if: ${{ steps.pr-create.outputs.pull-request-number && steps.pr-create.outputs.pull-request-operation != 'closed' }} + run: > + gh workflow run -R Graylog2/graylog-project-internal pr-build.yml --ref master + -f caller_repo=${{ github.repository }} + -f caller_pr_nr=${{ steps.pr-create.outputs.pull-request-number }} + -f caller_base_branch=master + -f caller_head_branch=${{ steps.pr-infos.outputs.headref }} + -f head_sha=${{ steps.pr-infos.outputs.sha }} + -f initial_actor="Dr. Lint-a-lot" + env: + GITHUB_TOKEN: ${{ secrets.PAT_GRAYLOG_PROJECT_INTERNAL_WORKFLOW_RW }} diff --git a/.gitignore b/.gitignore index feebd031432b..58b9cd558ff9 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,4 @@ dependency-reduced-pom.xml **/hs_err_pid*.log +data-node/bin/* diff --git a/.mvn/README.md b/.mvn/README.md new file mode 100644 index 000000000000..e6f4d5b8be28 --- /dev/null +++ b/.mvn/README.md @@ -0,0 +1,3 @@ +## How to create/update the maven wrapper + +`mvn wrapper:wrapper -Dtype=only-script -Dmaven=` diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 000000000000..32599cefea51 --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,10 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 000000000000..d58dfb70bab5 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,19 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+wrapperVersion=3.3.2 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 7b5021eefede..000000000000 --- a/.travis.yml +++ /dev/null @@ -1,54 +0,0 @@ -sudo: required -dist: trusty -language: java -jdk: - - oraclejdk8 -cache: - directories: - - $HOME/.m2 -notifications: - slack: - on_success: change - on_failure: always - rooms: - secure: |- - g6Gpj2n1Bl6hIwWlywEkw8wRjpN2XuFsxFr9KQu/p7mRLYPdF3V5qQPVLBRd - KvkjCIM7gZy2DTRNUooQPXdJB3npbnlbQn4jNWqA7/fp434Sw5sdfSUMawGr - XvemLvn0KxQxCO9GfN3wfmYxEWJwXO36Q29BnRWWgMQOXpFFD8A= -before_script: - - sudo sysctl -w vm.max_map_count=262144 -script: - - mvn -B verify -Dit.ElasticsearchVersion=${GRAYLOG_ELASTICSEARCH_VERSION} -after_success: - - mvn -B -Dmaven.test.skip=true -Dskip.web.build=true assembly:single - - mvn -B -Dmaven.test.skip=true -Dskip.web.build=true --settings config/settings.xml deploy -deploy: - provider: s3 - access_key_id: AKIAIGYGO43W76PZMMVA - secret_access_key: - secure: cYHsMgnAI08sz9G1Qb7ku2HoTaSn/OBBmjBzyYswX0/Kvb2asQcEGZGOEfYNTGOwdN6DP7p3axi5o+FPw2MLGGpBVz3tAtwYWTbzJR8kCEuGwb8N6abpdkGIn4AfifQ+vbn8zJrIB6HfGGRpDM9QnEjJFtpJO+2SjODzbZzkymE= - bucket: graylog2-builds - region: eu-west-1 - skip_cleanup: true - local-dir: target/assembly - on: - repo: Graylog2/graylog2-server - jdk: oraclejdk8 - branch: - - master - - 1.0 - - 1.1 - - 1.2 - - 1.3 - - 2.0 - - 2.1 - - 2.2 - - 2.3 - - 2.4 -env: - global: - - secure: MYZwUwFkHwWfJ79JKyDK8VrYVcsax4t+7atMLLVNI4CDxTWZzR4qFGUfauf+7fDEmnGYbMHDRSnzzhVtSR0ZSuvWoSkZ+v62ASmSfglzI2GcMD/VBREq+9TlLasSIa+wR60VvgYwxJnawwJlV6sbjmetT6MWug7/icdi5KgfDlQ= - - secure: r0enVlGBPjqaGfNrD1b6YxgRaSp/v39ZMKQFs/C0RtRKliZ5LVo7kS4lL0tLQoqMAXJel0NgsVH6j1P6428uLQDxqKmCsNZiX0VS9K3Z0iq5NG4x/5QidxyXjHefSGmpquLmZbM094xhtvdou3NEfkJTKbxHu3ML9i9ScMn5+vA= - - secure: "OiqpeUFVXO/sWdSA4WZIrK5UvVUN4lG5YWm41e1P+6lvOAHpWeyX6fIs9Ndz12Vio/Wlt03ixzCAO/67UW4XGv/e3tClmvOSKt811Ml8wu3ALMxdV2218OkFXGzQ8AOnIRK1AAln3JdvBMHj8wFoUmGBaHJ7odqOxu3WC4m0Xr0=" - matrix: - - GRAYLOG_ELASTICSEARCH_VERSION=5 diff --git a/AUTHORS b/AUTHORS index e08bfeee7fdb..ab8a189ba4f8 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1 +1 @@ -Lennart Koopmann +All the amazing people at Graylog, Inc, it's subsidiaries and third party contributors. diff --git a/COPYING b/COPYING deleted file mode 100644 index 94a9ed024d38..000000000000 --- a/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. 
- - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. 
This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. 
This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. 
- - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. 
- - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. 
Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000000..4e1383df1ee4 --- /dev/null +++ b/LICENSE @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. + + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. 
Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. 
For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. + + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. + + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. + + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. 
Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. 
For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. 
You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. + + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. 
If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/README.markdown b/README.markdown index 6b5f148acda1..4dfe648c918f 100644 --- a/README.markdown +++ b/README.markdown @@ -1,10 +1,9 @@ # Graylog -[![Build Status](https://travis-ci.org/Graylog2/graylog2-server.svg?branch=master)](https://travis-ci.org/Graylog2/graylog2-server) -[![License](https://img.shields.io/github/license/Graylog2/graylog2-server.svg)](https://www.gnu.org/licenses/gpl-3.0.txt) +[![License](https://img.shields.io/badge/license-SSPL-green)](https://www.mongodb.com/licensing/server-side-public-license) [![Maven Central](https://img.shields.io/maven-central/v/org.graylog2/graylog2-server.svg)](http://mvnrepository.com/artifact/org.graylog2/graylog2-server) -Welcome! _Graylog_ is an open source log management platform. +Welcome! _Graylog_ is a free and open log management platform. 
You can read more about the project on our [website](https://www.graylog.org/) and check out the [documentation](http://docs.graylog.org/) on the documentation site. @@ -20,19 +19,12 @@ Help us build the future of log management and be part of a project that is used Follow the [contributors guide](https://www.graylog.org/community) and read [the contributing instructions](CONTRIBUTING.md) to get started. +Do you want to get paid for developing our free and open product? Apply for one of our [jobs](https://www.graylog.org/careers)! ## Staying in Touch -Come chat with us in the [`#graylog` channel on freenode IRC](https://webchat.freenode.net/?channels=%23graylog) or create a topic in our [community discussion forums](https://community.graylog.org/). - - -## Miscellaneous - -![YourKit](https://s3.amazonaws.com/graylog2public/images/yourkit.png) - -YourKit supports our open source project by sponsoring its full-featured Java Profiler. YourKit, LLC is the creator of [YourKit Java Profiler](http://www.yourkit.com/java/profiler/index.jsp) and [YourKit .NET Profiler](http://www.yourkit.com/.net/profiler/index.jsp), innovative and intelligent tools for profiling Java and .NET applications. - +Come chat with us in the [`#graylog` channel on freenode IRC](https://webchat.freenode.net/?channels=%23graylog), the `#graylog` channel on [libera](https://libera.chat/), or create a topic in our [community discussion forums](https://community.graylog.org/). ## License -_Graylog_ is released under version 3.0 of the [GNU General Public License](COPYING). +_Graylog_ is released under version 1 of the [Server Side Public License (SSPL)](LICENSE). diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000000..d4223c92de2e --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,18 @@ +# Graylog Security Policy + +## Supported Versions + +Graylog addresses vulnerabilities in the current and the previous releases (a release is anything that increases either the major or the minor version, in the [semver](https://semver.org) sense) of the last twelve months. + +We highly recommend that anyone using a version older than twelve months _or_ older than the last two releases upgrade as soon as possible. + +## Reporting a Vulnerability + +We are grateful to anyone reporting a vulnerability and helping us make Graylog better and more secure. Additionally, we encourage everyone to disclose bugs in a responsible way, allowing us and other Graylog users to react in a timely manner. +That means: + + - If you want to report a *critical bug*, one that could allow someone to steal credentials, execute code, or escalate privileges, please send a bug report to security@graylog.com before publishing it. This allows us to fix it and release a new version, and lets other Graylog users update before the information is out in the wild. After receiving the bug report, we will immediately get back to you to coordinate the required action. + - If you want to report a *non-critical bug*, write to security@graylog.com _or_ [open an issue on GitHub](https://github.com/Graylog2/graylog2-server/issues/new). + - This is an open source project. If you discover a bug and fix it, you are very welcome to submit a PR. You will be rewarded with the everlasting gratitude of the Graylog team and the community! + + Thanks and happy logging! 
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 000000000000..f200f4558d02
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,59 @@
+Upgrading to Graylog 6.2.x
+==========================
+
+## Breaking Changes
+
+### Plugins
+* This release includes Java API changes which might require plugin authors to adjust their code. Please check
+  [Java API Changes](#java-api-changes) for details.
+* The `enterpriseWidgets` web interface plugin has been adjusted: the `editComponent` attribute no longer has an `onSubmit` prop.
+  Before this change, the prop had to be called to close the widget edit mode. Now it is enough to call `applyAllWidgetChanges` from the `WidgetEditApplyAllChangesContext`.
+  Alternatively, the `SaveOrCancelButtons` component can be used in the edit component for custom widgets. It renders a cancel and a submit button and calls `applyAllWidgetChanges` on submit.
+
+## Configuration File Changes
+
+| Option | Action | Description |
+|---------------|------------|------------------------------------------------|
+| `tbd` | **added** | |
+
+## Default Configuration Changes
+
+- tbd
+
+## Java API Changes
+
+Upgraded [MongoJack](https://github.com/mongojack/mongojack) to version 5.x. This impacts the Java API for accessing
+documents in MongoDB. Some previously deprecated MongoJack classes (like `org.mongojack.DBQuery`) have been removed.
+Plugin authors will have to replace usages of the removed classes with the corresponding classes from the MongoDB driver
+packages, most prominently `com.mongodb.client.model.Filters`.
+
+Additionally, the following Java API changes are included in this release:
+
+| File/method | Description |
+|-----------------------------------------------------------------------------------|------------------------------------------|
+| `org.graylog.scheduler.JobSchedule#toDBUpdate` | removed |
+| `org.graylog.scheduler.DBJobTriggerService#all` | replaced by streamAll |
+| `org.graylog.scheduler.DBJobTriggerService#getAllForJob` | replaced by streamAllForJob |
+| `org.graylog.scheduler.DBJobTriggerService#findByQuery` | replaced by streamByQuery |
+| `org.graylog.events.processor.DBEventDefinitionService#getByNotificationId` | replaced by streamByNotificationId |
+| `org.graylog.events.processor.DBEventDefinitionService#getSystemEventDefinitions` | replaced by streamSystemEventDefinitions |
+| `org.graylog.events.processor.DBEventDefinitionService#getByArrayValue` | replaced by streamByArrayValue |
+| `org.graylog2.lookup.db.DBCacheService#findByIds` | replaced by streamByIds |
+| `org.graylog2.lookup.db.DBCacheService#findAll` | replaced by streamAll |
+| `org.graylog2.lookup.db.DBDataAdapterService#findByIds` | replaced by streamByIds |
+| `org.graylog2.lookup.db.DBDataAdapterService#findAll` | replaced by streamAll |
+| `org.graylog2.lookup.db.DBLookupTableService#findByCacheIds` | replaced by streamByCacheIds |
+| `org.graylog2.lookup.db.DBLookupTableService#findByDataAdapterIds` | replaced by streamByDataAdapterIds |
+| `org.graylog2.lookup.db.DBLookupTableService#findAll` | replaced by streamAll |
+
+The new streaming methods of the DBService classes require the returned streams to be closed after use; we recommend
+using try-with-resources statements. A minimal sketch of both changes follows.
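+
+The sketch below is illustrative only, not a verified recipe: it assumes an injected `DBJobTriggerService`,
+`JobTriggerDto` as the stream element type, and a hypothetical `status` field; the exact signatures in your plugin
+may differ.
+
+```java
+import com.mongodb.client.model.Filters;
+import org.bson.conversions.Bson;
+import org.graylog.scheduler.DBJobTriggerService;
+import org.graylog.scheduler.JobTriggerDto;
+
+import java.util.stream.Stream;
+
+class MongoJackMigrationExample {
+    private final DBJobTriggerService dbJobTriggerService;
+
+    MongoJackMigrationExample(DBJobTriggerService dbJobTriggerService) {
+        this.dbJobTriggerService = dbJobTriggerService;
+    }
+
+    // Query construction: removed MongoJack helpers (e.g. DBQuery.is("status", "runnable"))
+    // are replaced by the MongoDB driver's Filters helper ("status" is an illustrative field).
+    Bson runnableTriggersQuery() {
+        return Filters.eq("status", "runnable");
+    }
+
+    // Streaming: DBJobTriggerService#all was replaced by #streamAll, which returns a
+    // java.util.stream.Stream that must be closed, so wrap the call in try-with-resources.
+    long countAllTriggers() {
+        try (Stream<JobTriggerDto> triggers = dbJobTriggerService.streamAll()) {
+            return triggers.count();
+        }
+    }
+}
+```
+
+## REST API Endpoint Changes
+
+The following REST API changes have been made.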
+ +| Endpoint | Description | +|-----------------------------------------------------------------------|-----------------------------------------------------------------------------------------| +| `GET /plugins/org.graylog.integrations/aws/inputs/available_services` | Remove unused endpoint. | +| `GET /plugins/org.graylog.integrations/aws/inputs/permissions` | Removed permissions endpoint in favor of maintaining permissions in official docs site. | +| `/plugins/org.graylog.plugins.files/*` | Removed (Graylog Enterprise plugin). | diff --git a/UPGRADING.rst b/UPGRADING.rst deleted file mode 100644 index 643e30cd1bed..000000000000 --- a/UPGRADING.rst +++ /dev/null @@ -1,304 +0,0 @@ -************************** -Upgrading to Graylog 3.0.x -************************** - -.. _upgrade-from-24-to-30: - -This file only contains the upgrade note for the upcoming release. -Please see `our documentation `_ -for the complete upgrade notes. - -Elasticsearch Version Requirements -================================== - -Graylog 3.0 drops support for Elasticsearch versions before 5.6.x. That means you have to upgrade Elasticsearch to at least version 5.6.5 before upgrading Graylog to version 3.0. Make sure to read the Elasticsearch upgrade guides before doing that. - -Simplified HTTP interface configuration -======================================= - -Graylog used to have a lot of different settings regarding the various HTTP interfaces it provides, namely the Graylog REST API and the Graylog web interface. - -This mostly originates from the fact that Graylog used to consist of two components before Graylog 2.0.0, a server component and a separate web interface. - -The changes in this release finally merge the HTTP listeners for the Graylog REST API and web interface into a single HTTP listener, which should make the initial configuration of Graylog simpler and reduce errors caused by conflicting settings. - -The path of the Graylog REST API is now hard-coded to ``/api``, so if you're still using the legacy URI on port 12900/tcp or have been using a custom path (via the ``rest_listen_uri`` or ``rest_transport_uri`` settings), you'll have to update the URI used to access the Graylog REST API. - -If you are using a reverse proxy in front of Graylog (like nginx) and configured it to set the ``X-Graylog-Server-URL`` HTTP header, you have to remove the ``api/`` suffix because that is now the default. (as mentioned above) - -Example:: - - # This nginx setting in Graylog <3.0 ... - header_upstream X-Graylog-Server-URL http://{host}/api - - # ... needs to be changed to the following with Graylog 3.0 - header_upstream X-Graylog-Server-URL http://{host}/ - -For a more detailed description of the new HTTP settings, please consult the annotated `Graylog configuration file `__. 
- - -Overview of removed Graylog REST API settings: - -+----------------------------------+----------------------------------+--------------------------------+ -| Removed Setting | New Setting | Default | -+==================================+==================================+================================+ -| ``rest_listen_uri`` | ``http_bind_address`` | ``127.0.0.1:9000`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_transport_uri`` | ``http_publish_uri`` | ``http://$http_bind_address/`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``web_endpoint_uri`` | ``http_external_uri`` | ``$http_publish_uri`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_enable_cors`` | ``http_enable_cors`` | ``true`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_enable_gzip`` | ``http_enable_gzip`` | ``true`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_max_header_size`` | ``http_max_header_size`` | ``8192`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_max_initial_line_length`` | ``http_max_initial_line_length`` | ``4096`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_thread_pool_size`` | ``http_thread_pool_size`` | ``16`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_enable_tls`` | ``http_enable_tls`` | ``false`` | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_tls_cert_file`` | ``http_tls_cert_file`` | Empty | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_tls_key_file`` | ``http_tls_key_file`` | Empty | -+----------------------------------+----------------------------------+--------------------------------+ -| ``rest_tls_key_password`` | ``http_tls_key_password`` | Empty | -+----------------------------------+----------------------------------+--------------------------------+ - - -Overview of removed Graylog web interface settings: - -+---------------------------------+----------------------------------+--------------------+ -| Removed Setting | New Setting | Default | -+=================================+==================================+====================+ -| ``web_enable`` | None | | -+---------------------------------+----------------------------------+--------------------+ -| ``web_listen_uri`` | ``http_bind_address`` | ``127.0.0.1:9000`` | -+---------------------------------+----------------------------------+--------------------+ -| ``web_enable_cors`` | ``http_enable_cors`` | ``true`` | -+---------------------------------+----------------------------------+--------------------+ -| ``web_enable_gzip`` | ``http_enable_gzip`` | ``true`` | -+---------------------------------+----------------------------------+--------------------+ -| ``web_max_header_size`` | ``http_max_header_size`` | ``8192`` | -+---------------------------------+----------------------------------+--------------------+ -| ``web_max_initial_line_length`` | ``http_max_initial_line_length`` | ``4096`` | 
-+---------------------------------+----------------------------------+--------------------+ -| ``web_thread_pool_size`` | ``http_thread_pool_size`` | ``16`` | -+---------------------------------+----------------------------------+--------------------+ -| ``web_enable_tls`` | ``http_enable_tls`` | ``false`` | -+---------------------------------+----------------------------------+--------------------+ -| ``web_tls_cert_file`` | ``http_tls_cert_file`` | Empty | -+---------------------------------+----------------------------------+--------------------+ -| ``web_tls_key_file`` | ``http_tls_key_file`` | Empty | -+---------------------------------+----------------------------------+--------------------+ -| ``web_tls_key_password`` | ``http_tls_key_password`` | Empty | -+---------------------------------+----------------------------------+--------------------+ - -Plugins merged into the Graylog server -====================================== - -Starting with Graylog 3.0.0, the following official plugins were merged into the Graylog server: - -- `Beats Input `_ -- `CEF Input `_ -- `Collector Plugin `_ -- `Enterprise Integration Page `_ -- `Map Widget `_ -- `NetFlow Input `_ -- `Pipeline Processor `_ - -That means these plugins are not available as separate plugins anymore. If you manually update your Graylog installation (without using operating system packages), make sure to remove all old plugin files from the `plugin_dir `_ folder. - -The old issues in these repositories are still available for reference but new issues should only be created in the `Graylog server issue tracker `_. - -The following HTTP API paths changed due to the plugin merge: - -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| Old Path | New Path | -+=============================================================================================+===============================================+ -| ``/plugins/org.graylog.plugins.map/mapdata`` | ``/search/mapdata`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/pipeline`` | ``/system/pipelines/pipeline`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/pipeline/parse`` | ``/system/pipelines/pipeline/parse`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/rule`` | ``/system/pipelines/rule`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/rule/functions`` | ``/system/pipelines/rule/functions`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/rule/multiple`` | ``/system/pipelines/rule/multiple`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| 
``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/rule/parse`` | ``/system/pipelines/rule/parse`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/connections`` | ``/system/pipelines/connections`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/connections/to_stream`` | ``/system/pipelines/connections/to_stream`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/connections/to_pipeline`` | ``/system/pipelines/connections/to_pipeline`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ -| ``/plugins/org.graylog.plugins.pipelineprocessor/system/pipelines/simulate`` | ``/system/pipelines/simulate`` | -+---------------------------------------------------------------------------------------------+-----------------------------------------------+ - -New "bin_dir" and "data_dir" configuration parameters -===================================================== - -We introduced two new configuration parameters related to file system paths. - -- ``bin_dir`` config option points to the directory that contains scripts like ``graylogctl``. -- ``data_dir`` option configures the base directory for Graylog server state. - -Please check the updated default ``graylog.conf`` configuration file for required changes to your existing file. - - -Removed support for Drools-based filters -======================================== - -For a long time, Graylog allowed to use `Drools `_ to filter messages. Unfortunately, using Drools to perform complex filter logic came with a performance penalty and wasn't as flexible as we would have liked it to be. - -Starting with Graylog 3.0.0, the support for Drools-based message filters has been removed from Graylog. The ``rules_file`` configuration setting has been removed accordingly. - -We recommend migrating the Drools-based logic to `Processing Pipelines `_. - - -Drools-based blacklist ----------------------- - -Graylog provided undocumented blacklist-functionality based on Drools. This blacklist could only be modified via the Graylog REST API on the ``/filters/blacklist`` resource. - -If you've been using this functionality, you'll have to migrate these blacklist rules to the `Processing Pipelines `_. 
- -To check if you're using the Drools-based blacklist in Graylog prior to version 3.0.0, you can run the following command:: - - # curl -u admin:password -H 'Accept: application/json' 'http://graylog.example.com/api/filters/blacklist?pretty=true' - - -String-based blacklist rule -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Old blacklist rule:: - - { - "id" : "54e300001234123412340001", - "type" : "string", - "name" : "String Blacklist", - "description" : "Drop messages based on case-insensitive string comparison", - "fieldName" : "custom_field", - "pattern" : "EXAMPLE pattern", - "creator_user_id" : "admin", - "created_at" : "2018-04-04T12:00:00.000Z" - } - -New pipeline rule:: - - rule "string-blacklist" - when - has_field("custom_field") && - lowercase(to_string($message.custom_field)) == "example pattern" - then - drop_message(); - end - -See also: - -* `has_field() `_ -* `lowercase() `_ -* `drop_message() `_ - -Regex-based blacklist rule -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Old blacklist rule:: - - { - "id" : "54e300001234123412340002", - "type" : "regex", - "name" : "Regex Blacklist", - "description" : "Drop messages based on regular expression", - "fieldName" : "custom_field", - "pattern" : "^EXAMPLE.*", - "creator_user_id" : "admin", - "created_at" : "2018-04-04T12:00:00.000Z" - } - -New pipeline rule:: - - rule "regex-blacklist" - when - has_field("custom_field") && - regex("^EXAMPLE.*", to_string($message.custom_field)).matches == true - then - drop_message(); - end - -See also: - -* `has_field() `_ -* `regex() `_ -* `drop_message() `_ - -IP Range-based blacklist rule -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Old blacklist rule:: - - { - "id" : "54e300001234123412340003", - "type" : "iprange", - "name" : "IP Blacklist", - "description" : "Drop messages based on IP address", - "fieldName" : "custom_field", - "pattern" : "192.168.0.0/16", - "creator_user_id" : "admin", - "created_at" : "2018-04-04T12:00:00.000Z" - } - -New pipeline rule:: - - rule "ip-blacklist" - when - has_field("custom_field") && - cidr_match("192.168.0.0/16", to_ip($message.custom_field)) - then - drop_message(); - end - -See also: - -* `has_field() `_ -* `to_ip() `_ -* `cidr_match() `_ -* `drop_message() `_ - - -Changed metrics name for stream rules -===================================== - -The name of the metrics for stream rules have been changed to include the stream ID which helps identifying the actual stream they are related to. - -Old metric name:: - - org.graylog2.plugin.streams.StreamRule.${stream-rule-id}.executionTime - -New metric name:: - - org.graylog2.plugin.streams.Stream.${stream-id}.StreamRule.${stream-rule-id}.executionTime - - -Email alarm callback default settings -===================================== - -The defaults of the configuration settings for the email alarm callback with regard to encrypted connections have been changed. - -+-----------------------------+-------------+-------------+ -| Setting | Old default | New default | -+=============================+=============+=============+ -| ``transport_email_use_tls`` | ``false`` | ``true`` | -+-----------------------------+-------------+-------------+ -| ``transport_email_use_ssl`` | ``true`` | ``false`` | -+-----------------------------+-------------+-------------+ - -Furthermore, it's not possible anymore to enable both settings (SMTP with STARTTLS and SMTP over SSL) at the same time because this led to errors at runtime when Graylog tried to upgrade the connection to TLS with STARTTLS in an already existing SMTPS connection. 
- -Most SMTP services prefer SMTP with STARTTLS to provide an encrypted connection. diff --git a/api-specs/stream-output-filters.yml b/api-specs/stream-output-filters.yml new file mode 100644 index 000000000000..11971ba77ee2 --- /dev/null +++ b/api-specs/stream-output-filters.yml @@ -0,0 +1,530 @@ +openapi: "3.1.0" +info: + title: "Stream Destination Filters" + version: "1.0.0" +servers: + - url: "http://localhost:8080/api" + +paths: + /streams/{streamId}/destinations/filters: + parameters: + - in: "path" + name: "streamId" + description: "The stream ID" + schema: + type: "string" + required: true + + get: + summary: "Get available filters for stream" + parameters: + - $ref: "#/components/parameters/query" + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/per_page" + - $ref: "#/components/parameters/sort" + - $ref: "#/components/parameters/order" + responses: + 200: + description: "Available filters" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/PaginationInfo" + - type: "object" + properties: + elements: + $ref: "#/components/schemas/StreamDestinationFilterRule" + + post: + summary: "Create new filter rule for stream" + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/StreamDestinationFilterRule" + responses: + 200: + description: "Newly created filter rule" + content: + application/json: + schema: + properties: + filter: + $ref: "#/components/schemas/StreamDestinationFilterRule" + + /streams/{streamId}/destinations/target/{targetId}/filters: + parameters: + - in: "path" + name: "streamId" + description: "The stream ID" + schema: + type: "string" + required: true + - in: "path" + name: "targetId" + description: "The target ID" + schema: + type: "string" + required: true + + get: + summary: "Get available filters for stream and target" + parameters: + - $ref: "#/components/parameters/query" + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/per_page" + - $ref: "#/components/parameters/sort" + - $ref: "#/components/parameters/order" + responses: + 200: + description: "Available filters" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/PaginationInfo" + - type: "object" + properties: + elements: + $ref: "#/components/schemas/StreamDestinationFilterRule" + + /streams/{streamId}/destinations/filters/{filterId}: + parameters: + - in: "path" + name: "streamId" + description: "The stream ID" + schema: + type: "string" + required: true + - in: "path" + name: "filterId" + description: "The stream destination filter ID" + schema: + type: "string" + required: true + + get: + summary: "Get stream destination filter" + responses: + 200: + description: "The stream destination filter" + content: + application/json: + schema: + properties: + filter: + $ref: "#/components/schemas/StreamDestinationFilterRule" + + put: + summary: "Update stream destination filter" + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/StreamDestinationFilterRule" + responses: + 200: + description: "The updated stream destination filter" + content: + application/json: + schema: + properties: + filter: + $ref: "#/components/schemas/StreamDestinationFilterRule" + + delete: + summary: "Delete stream destination filter" + responses: + 200: + description: "The deleted stream destination filter" + content: + application/json: + schema: + properties: + filter: + $ref: 
"#/components/schemas/StreamDestinationFilterRule" + + /streams/destinations/filters/builder/conditions: + get: + summary: "Return available conditions" + responses: + 200: + description: "Conditions" + content: + application/json: + schema: + properties: + conditions: + type: "array" + conditions: + $ref: "#/components/schemas/RuleBuilderFunction" + + /streams/destinations/filters/builder/validate: + post: + summary: "Validates the given rule builder" + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/RuleBuilderDTO" + responses: + 200: + description: "Validated rule builder" + content: + application/json: + schema: + properties: + rule_builder: + $ref: "#/components/schemas/RuleBuilderDTO" + + /streams/destinations/filters/builder/simulate: + post: + summary: "Run the simulator for the given rule and message" + requestBody: + required: true + content: + application/json: + schema: + type: "object" + required: + - "message" + - "rule_builder_dto" + properties: + message: + type: "string" + rule_builder_dto: + $ref: "#/components/schemas/RuleBuilderDTO" + responses: + 200: + description: "Simulator result" + content: + application/json: + schema: + properties: + result: + type: "object" + default: + source: "localhost" + timestamp: "2024-06-25T12:23:00.000Z" + message: "A message" + +components: + parameters: + page: + in: "query" + name: "page" + description: "The page to return" + required: false + schema: + type: "integer" + default: 1 + per_page: + in: "query" + name: "per_page" + description: "The number of items per page" + required: false + schema: + type: "integer" + default: 20 + query: + in: "query" + name: "query" + description: "The filter query" + required: false + schema: + type: "string" + default: "" + sort: + in: "query" + name: "sort" + description: "The field to sort on" + required: false + schema: + type: "string" + default: "title" + order: + in: "query" + name: "order" + description: "The sort order" + required: false + schema: + type: "string" + default: "ASC" + enum: + - "ASC" + - "DESC" + + schemas: + ########################################################################################################## + ## New types + ########################################################################################################## + StreamDestinationFilterRule: + type: "object" + required: + - "id" + - "stream_id" + - "destination_type" + - "title" + - "rule" + properties: + id: + type: "string" + description: "The database ID" + stream_id: + type: "string" + description: "The associated stream ID" + destination_type: + type: "string" + description: "The destination type" + enum: + - "indexer" + - "data-warehouse" + title: + type: "string" + description: "The filter title" + description: + type: "string" + description: "The filter description" + status: + type: "string" + description: "The filter status" + enum: + - "enabled" + - "disabled" + rule: + $ref: "#/components/schemas/RuleBuilder" + example: + id: "6666dadd4dfc4e7c263a7900" + stream_id: "6666df991ca760793bf0fd56" + destination_type: "indexer" + title: "Skip debug messages" + status: "enabled" + rule: + operator: "OR" + conditions: + - id: "condition-1" + function: "has_field" + params: + field: "is_debug" + negate: false + - id: "condition-2" + function: "has_field" + params: + field: "msg_is_debug" + negate: false + + ########################################################################################################## + ## Existing types + 
########################################################################################################## + RuleBuilderFunction: + type: "object" + properties: + name: + type: "string" + description: + type: "string" + pure: + type: "boolean" + return_type: + type: "string" + params: + type: "array" + items: + $ref: "#/components/schemas/RuleBuilderFunctionParam" + rule_builder_enabled: + type: "boolean" + rule_builder_title: + type: "string" + rule_builder_name: + type: "string" + example: + name: "set_field" + description: "" + pure: false + return_type: "java.lang.Void" + params: + - name: "field" + description: "the new field name" + type: "java.lang.String" + transformed_type: "java.lang.String" + optional: false + default_value: null + rule_builder_variable: false + allow_negatives: false + + RuleBuilderFunctionParam: + type: "object" + properties: + name: + type: "string" + description: + type: "string" + type: + type: "string" + transformed_type: + type: "string" + optional: + type: "boolean" + default_value: + type: "string" + rule_builder_variable: + type: "boolean" + allow_negatives: + type: "boolean" + example: + name: "field" + description: "the new field name" + type: "java.lang.String" + transformed_type: "java.lang.String" + optional: false + default_value: null + rule_builder_variable: false + allow_negatives: false + + RuleBuilderDTO: + type: "object" + properties: + id: + type: "string" + title: + type: "string" + description: + type: "string" + source: + type: "string" + rule_builder: + $ref: "#/components/schemas/RuleBuilder" + simulator_message: + type: "string" + created_at: + type: "string" + format: "date-time" + modified_at: + type: "string" + format: "date-time" + + RuleBuilder: + type: "object" + properties: + operator: + type: "string" + description: "The operator to use to evaluate the conditions" + default: "AND" + enum: + - "AND" + - "OR" + conditions: + type: "array" + description: "The conditions array" + items: + $ref: "#/components/schemas/RuleBuilderStep" + + RuleBuilderStep: + type: "object" + required: + - "id" + - "function" + - "parameters" + properties: + id: + type: "string" + description: "The condition ID" + function: + type: "string" + description: "The function name" + params: + type: "object" + description: "The named function parameters" + negate: + type: "boolean" + description: "Whether to negate the condition" + default: false + operator: + type: "string" + description: "The operator to use to evaluate the nested conditions" + default: "AND" + enum: + - "AND" + - "OR" + conditions: + type: "array" + description: "The nested conditions array" + items: + $ref: "#/components/schemas/RuleBuilderStep" + example: + id: "condition-1" + function: "has_field" + params: + field: "source" + negate: false + operator: "AND" + conditions: [] + + PaginationInfo: + type: "object" + properties: + attributes: + type: "array" + items: + type: "object" + properties: + id: + type: "string" + default: "title" + title: + type: "string" + default: "The Title" + sortable: + type: "boolean" + default: true + searchable: + type: "boolean" + default: true + hidden: + type: "boolean" + default: false + defaults: + type: "object" + properties: + sort: + type: "object" + properties: + id: + type: "string" + default: "title" + direction: + type: "string" + default: "ASC" + enum: + - "ASC" + - "DESC" + query: + type: "string" + pagination: + type: "object" + properties: + total: + type: "number" + default: 0 + count: + type: "number" + default: 0 + page: + type: 
"number" + default: 0 + per_page: + type: "number" + default: 20 + total: + type: "number" + default: 0 + sort: + type: "string" + default: "title" + order: + type: "string" + default: "asc" diff --git a/bin/certutil b/bin/certutil new file mode 100755 index 000000000000..06b2a1ebd183 --- /dev/null +++ b/bin/certutil @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +CMD=$1 +NOHUP=${NOHUP:=$(which nohup)} +PS=${PS:=$(which ps)} + +# default java +JAVA_CMD=${JAVA_CMD:=$(which java)} + + +if [ -n "$JAVA_HOME" ] +then + # try to use $JAVA_HOME + if [ -x "$JAVA_HOME"/bin/java ] + then + JAVA_CMD="$JAVA_HOME"/bin/java + else + die "$JAVA_HOME"/bin/java is not executable + fi +fi + +# resolve links - $0 may be a softlink +GRAYLOGCTL="$0" + +while [ -h "$GRAYLOGCTL" ]; do + ls=$(ls -ld "$GRAYLOGCTL") + link=$(expr "$ls" : '.*-> \(.*\)$') + if expr "$link" : '/.*' > /dev/null; then + GRAYLOGCTL="$link" + else + GRAYLOGCTL=$(dirname "$GRAYLOGCTL")/"$link" + fi +done + +# take variables from environment if set +GRAYLOGCTL_DIR=${GRAYLOGCTL_DIR:=$(dirname "$GRAYLOGCTL")} +GRAYLOG_JVM_DIR="$(dirname "$GRAYLOGCTL_DIR")/jvm" +GRAYLOG_SERVER_JAR=${GRAYLOG_SERVER_JAR:=graylog.jar} +DEFAULT_JAVA_OPTS="-Dlog4j2.formatMsgNoLookups=true -Djdk.tls.acknowledgeCloseNotify=true -Xms1g -Xmx1g -XX:+UseG1GC -server -XX:-OmitStackTraceInFastThrow" + +if [ -z "$JAVA_HOME" ] && [ -d "$GRAYLOG_JVM_DIR" ]; then + echo "Using bundled JVM in $GRAYLOG_JVM_DIR" + export JAVA_HOME="$GRAYLOG_JVM_DIR" + JAVA_CMD="$GRAYLOG_JVM_DIR/bin/java" +fi + +JAVA_OPTS="${JAVA_OPTS:="$DEFAULT_JAVA_OPTS"}" + +certutil() { + echo "Running certutil $1..." + cd "$GRAYLOGCTL_DIR/.." + "${JAVA_CMD}" ${JAVA_OPTS} ${LOG4J} -jar "${GRAYLOG_SERVER_JAR}" certutil $1 +} + +case "$CMD" in + ca) + certutil "ca" + ;; + cert) + certutil "cert" + ;; + http) + certutil "http" + ;; + truststore) + certutil "truststore" + ;; + *) + echo "Usage $0 {ca|cert|http|truststore}" +esac diff --git a/bin/graylogctl b/bin/graylogctl index 82e728acacd8..65c49c1065cf 100755 --- a/bin/graylogctl +++ b/bin/graylogctl @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash CMD=$1 NOHUP=${NOHUP:=$(which nohup)} @@ -46,12 +46,20 @@ done # take variables from environment if set GRAYLOGCTL_DIR=${GRAYLOGCTL_DIR:=$(dirname "$GRAYLOGCTL")} +GRAYLOG_JVM_DIR="$(dirname "$GRAYLOGCTL_DIR")/jvm" GRAYLOG_SERVER_JAR=${GRAYLOG_SERVER_JAR:=graylog.jar} GRAYLOG_CONF=${GRAYLOG_CONF:=/etc/graylog/server/server.conf} GRAYLOG_PID=${GRAYLOG_PID:=/tmp/graylog.pid} LOG_FILE=${LOG_FILE:=log/graylog-server.log} LOG4J=${LOG4J:=} -DEFAULT_JAVA_OPTS="-Djava.library.path=${GRAYLOGCTL_DIR}/../lib/sigar -Xms1g -Xmx1g -XX:NewRatio=1 -server -XX:+ResizeTLAB -XX:+UseConcMarkSweepGC -XX:+CMSConcurrentMTEnabled -XX:+CMSClassUnloadingEnabled -XX:+UseParNewGC -XX:-OmitStackTraceInFastThrow" +DEFAULT_JAVA_OPTS="-Dlog4j2.formatMsgNoLookups=true -Djdk.tls.acknowledgeCloseNotify=true -Xms1g -Xmx1g -XX:+UseG1GC -server -XX:-OmitStackTraceInFastThrow" + +if [ -z "$JAVA_HOME" ] && [ -d "$GRAYLOG_JVM_DIR" ]; then + echo "Using bundled JVM in $GRAYLOG_JVM_DIR" + export JAVA_HOME="$GRAYLOG_JVM_DIR" + JAVA_CMD="$GRAYLOG_JVM_DIR/bin/java" +fi + JAVA_OPTS="${JAVA_OPTS:="$DEFAULT_JAVA_OPTS"}" start() { diff --git a/changelog/5.0.0/issue-11528.toml b/changelog/5.0.0/issue-11528.toml new file mode 100644 index 000000000000..71317156430d --- /dev/null +++ b/changelog/5.0.0/issue-11528.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add support for custom OIDC claims" + +issues = ["11528"] +pulls = ["3544", "12624"] diff --git 
a/changelog/5.0.0/issue-11973.toml b/changelog/5.0.0/issue-11973.toml new file mode 100644 index 000000000000..69be063db224 --- /dev/null +++ b/changelog/5.0.0/issue-11973.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added validation for field value types. It will detect the correct type of the field and warn if the user enters a value of a different data type." + +issues = ["11973"] +pulls = ["12088"] diff --git a/changelog/5.0.0/issue-12025.toml b/changelog/5.0.0/issue-12025.toml new file mode 100644 index 000000000000..01076eed90b0 --- /dev/null +++ b/changelog/5.0.0/issue-12025.toml @@ -0,0 +1,7 @@ +type = "changed" +message = "Set elasticsearch_index_optimization_jobs default to 10" + +details.user = "Prevents blocking of the master node's ingestion while optimizing indices after index rotation." + +issues = ["12025"] +pulls = ["13521"] diff --git a/changelog/5.0.0/issue-12090.toml b/changelog/5.0.0/issue-12090.toml new file mode 100644 index 000000000000..73e77024644c --- /dev/null +++ b/changelog/5.0.0/issue-12090.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Hide edit button for permissions config if the user doesn't have edit permissions." + +issues = ["12090"] +pulls = ["13939"] diff --git a/changelog/5.0.0/issue-12337.toml b/changelog/5.0.0/issue-12337.toml new file mode 100644 index 000000000000..203bc1e5b484 --- /dev/null +++ b/changelog/5.0.0/issue-12337.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add ability for inputs to specify encoding" + +issues = ["12337"] +pulls = ["12654"] diff --git a/changelog/5.0.0/issue-12421.toml b/changelog/5.0.0/issue-12421.toml new file mode 100644 index 000000000000..ce10f43c1029 --- /dev/null +++ b/changelog/5.0.0/issue-12421.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix incorrect Prometheus mapping for input metrics." + +issues = ["12421"] +pulls = ["12560"] diff --git a/changelog/5.0.0/issue-12572.toml b/changelog/5.0.0/issue-12572.toml new file mode 100644 index 000000000000..201101658d8c --- /dev/null +++ b/changelog/5.0.0/issue-12572.toml @@ -0,0 +1,5 @@ +type = "added" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "The list of fields can now show only the fields that are present in the selected streams if the `stream_aware_field_types` config property is set to true." + +issues = ["12572", "9397"] +pulls = ["13363"] diff --git a/changelog/5.0.0/issue-12778.toml b/changelog/5.0.0/issue-12778.toml new file mode 100644 index 000000000000..5bc7dca1d90c --- /dev/null +++ b/changelog/5.0.0/issue-12778.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix bug when installing Sidecar collector configuration in Content Pack." + +issues = ["12778"] +pulls = ["13632"] diff --git a/changelog/5.0.0/issue-12888.toml b/changelog/5.0.0/issue-12888.toml new file mode 100644 index 000000000000..85cef8d97c49 --- /dev/null +++ b/changelog/5.0.0/issue-12888.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Uses the whitespace analyzer for query parsing and validation. This is needed to prevent unexpected field value parsing." + +issues = ["12888"] +pulls = ["12918"] diff --git a/changelog/5.0.0/issue-12908.toml b/changelog/5.0.0/issue-12908.toml new file mode 100644 index 000000000000..82abb17a1a9c --- /dev/null +++ b/changelog/5.0.0/issue-12908.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixes sorting exception on aggregations by removing 'latest' metrics from the list of possible sorts."
+ +issues = ["12908"] +pulls = ["14027"] diff --git a/changelog/5.0.0/issue-12909.toml b/changelog/5.0.0/issue-12909.toml new file mode 100644 index 000000000000..3a2c342a4eb6 --- /dev/null +++ b/changelog/5.0.0/issue-12909.toml @@ -0,0 +1,10 @@ +type = "fixed" +message = "Fix breaking change in Geo-Location Processor fields." + +issues = ["12909", "13202", "13203"] +pulls = ["13094"] + +details.user = """ +Changes in the IP Geo-Location Processor caused the fields generated to be different from what was previously expected. +The new behavior and fields generated by the processor can be found in the processor's [documentation](https://docs.graylog.org/docs/geolocation). +""" diff --git a/changelog/5.0.0/issue-12910.toml b/changelog/5.0.0/issue-12910.toml new file mode 100644 index 000000000000..222a12e7f02a --- /dev/null +++ b/changelog/5.0.0/issue-12910.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Event definition page now includes a button to edit it directly" + +issues = ["12910"] +pulls = ["13248"] diff --git a/changelog/5.0.0/issue-13021.toml b/changelog/5.0.0/issue-13021.toml new file mode 100644 index 000000000000..f0c5a1973b94 --- /dev/null +++ b/changelog/5.0.0/issue-13021.toml @@ -0,0 +1,6 @@ +type = "changed" +message = "Index detail shard numbers (segments, open search contexts, and deleted messages) are now formatted." +contributors = ["@supahgreg"] + +issues = [] +pulls = ["13021"] diff --git a/changelog/5.0.0/issue-13024.toml b/changelog/5.0.0/issue-13024.toml new file mode 100644 index 000000000000..f4b72a972fcd --- /dev/null +++ b/changelog/5.0.0/issue-13024.toml @@ -0,0 +1,6 @@ +type = "added" +message = "Show Elasticsearch/OpenSearch cluster health on index sets overview page." +contributors = ["@supahgreg"] + +issues = ["13017"] +pulls = ["13024"] diff --git a/changelog/5.0.0/issue-13028.toml b/changelog/5.0.0/issue-13028.toml new file mode 100644 index 000000000000..cebc4384275d --- /dev/null +++ b/changelog/5.0.0/issue-13028.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "When using the index time rotation strategy, roatating of empty index sets is skipped." + +issues = ["13028"] +pulls = ["13735, graylog-plugin-enterprise#3869"] diff --git a/changelog/5.0.0/issue-13053.toml b/changelog/5.0.0/issue-13053.toml new file mode 100644 index 000000000000..45f256c0b76c --- /dev/null +++ b/changelog/5.0.0/issue-13053.toml @@ -0,0 +1,5 @@ +type = "a" +message = "New configuration flag ignore_migration_failures to skip migrations which are blocked" + +issues = ["13053"] +pulls = ["13137"] diff --git a/changelog/5.0.0/issue-13082.toml b/changelog/5.0.0/issue-13082.toml new file mode 100644 index 000000000000..20350cfda1c3 --- /dev/null +++ b/changelog/5.0.0/issue-13082.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Fixing autocompletion for inputs with quoting/containing slashes." + + +issues = ["13082"] +pulls = ["13087", "13092"] diff --git a/changelog/5.0.0/issue-13114.toml b/changelog/5.0.0/issue-13114.toml new file mode 100644 index 000000000000..16a59b572316 --- /dev/null +++ b/changelog/5.0.0/issue-13114.toml @@ -0,0 +1,6 @@ +type = "changed" +message = "Switch from 'openjdk' to 'eclipse-temurin' for Graylog Server test image." 
+contributors = ["@supahgreg"] + +issues = ["13114"] +pulls = ["13002"] diff --git a/changelog/5.0.0/issue-13116.toml b/changelog/5.0.0/issue-13116.toml new file mode 100644 index 000000000000..a5a6c7db5ac0 --- /dev/null +++ b/changelog/5.0.0/issue-13116.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Show title of dashboards and saved searches on page." + +issues = ["13116"] +pulls = ["13491"] diff --git a/changelog/5.0.0/issue-13157.toml b/changelog/5.0.0/issue-13157.toml new file mode 100644 index 000000000000..86f63e2584a8 --- /dev/null +++ b/changelog/5.0.0/issue-13157.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Changed items displayed in the System dropdown menu to match the permissions for the page that they link to." + +issues = ["13157"] +pulls = ["13188"] diff --git a/changelog/5.0.0/issue-13251.toml b/changelog/5.0.0/issue-13251.toml new file mode 100644 index 000000000000..4e07a7282e72 --- /dev/null +++ b/changelog/5.0.0/issue-13251.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add a new built-in \"Pipelines Manager\" role." + +issues = ["13251"] +pulls = ["13866"] diff --git a/changelog/5.0.0/issue-13254.toml b/changelog/5.0.0/issue-13254.toml new file mode 100644 index 000000000000..a8d606f6c87c --- /dev/null +++ b/changelog/5.0.0/issue-13254.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix `source` field extraction for Beats version 8 and later." + +issues = ["13254"] +pulls = ["13895"] diff --git a/changelog/5.0.0/issue-13259.toml b/changelog/5.0.0/issue-13259.toml new file mode 100644 index 000000000000..6d12eccfe764 --- /dev/null +++ b/changelog/5.0.0/issue-13259.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Do not toggle message details in message table widget, when selecting text." + +issues = ["13259"] +pulls = ["13263"] diff --git a/changelog/5.0.0/issue-13306.toml b/changelog/5.0.0/issue-13306.toml new file mode 100644 index 000000000000..ffd99335e21f --- /dev/null +++ b/changelog/5.0.0/issue-13306.toml @@ -0,0 +1,11 @@ +type = "fixed" +message = "Fix performance regression on UDP inputs with newer Java versions" + +issues = ["13306"] +pulls = ["14005"] + +contributors = ["@giangi"] + +details.user = """ +UDP inputs became slow when running Graylog with a Java version > 8. +""" diff --git a/changelog/5.0.0/issue-13317.toml b/changelog/5.0.0/issue-13317.toml new file mode 100644 index 000000000000..a21e984e0820 --- /dev/null +++ b/changelog/5.0.0/issue-13317.toml @@ -0,0 +1,5 @@ +type = "removed" +message = "Removed support for Elasticsearch 6" + +issues = ["13317"] +pulls = ["13321", "graylog-plugin-enterprise#3992", "graylog-project-internal#77", "graylog-plugin-enterprise#4035"] diff --git a/changelog/5.0.0/issue-13454.toml b/changelog/5.0.0/issue-13454.toml new file mode 100644 index 000000000000..c2f47ded2411 --- /dev/null +++ b/changelog/5.0.0/issue-13454.toml @@ -0,0 +1,11 @@ +type = "fixed" +message = "Fix journal directory preflight size check" + +issues = ["13454"] +pulls = ["13470"] + +contributors = [""] + +details.user = """ +Include the already used size of the journal directory into the size calcalation. +""" diff --git a/changelog/5.0.0/issue-13455.toml b/changelog/5.0.0/issue-13455.toml new file mode 100644 index 000000000000..6d5a4977e25a --- /dev/null +++ b/changelog/5.0.0/issue-13455.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Queries that have the form of `_exists_:field_name` are now validated without any wrong warning messages." 
+ +issues = ["13455"] +pulls = ["13489"] diff --git a/changelog/5.0.0/issue-13593.toml b/changelog/5.0.0/issue-13593.toml new file mode 100644 index 000000000000..a2ac8102d5b9 --- /dev/null +++ b/changelog/5.0.0/issue-13593.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing latest aggregation if field is not present in time range." + +issues = ["13593"] +pulls = ["13640"] diff --git a/changelog/5.0.0/issue-13660.toml b/changelog/5.0.0/issue-13660.toml new file mode 100644 index 000000000000..d2d760762983 --- /dev/null +++ b/changelog/5.0.0/issue-13660.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Pre-flight check requires at least MongoDB 5.0" + +issues = ["13660"] +pulls = ["13778"] diff --git a/changelog/5.0.0/issue-13721.toml b/changelog/5.0.0/issue-13721.toml new file mode 100644 index 000000000000..47dbb873710a --- /dev/null +++ b/changelog/5.0.0/issue-13721.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix \"Unauthorized\" messages in server log when \"Trusted Header Authentication\" is enabled and a user session expires." + +issues = ["13721"] +pulls = ["13735"] diff --git a/changelog/5.0.0/issue-13819.toml b/changelog/5.0.0/issue-13819.toml new file mode 100644 index 000000000000..a6b808c1d499 --- /dev/null +++ b/changelog/5.0.0/issue-13819.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Take into account if \"Always match\" rule is inverted. Previously it was ignored for this rule type." + +issues = ["13819"] +pulls = ["13847"] diff --git a/changelog/5.0.0/issue-13888.toml b/changelog/5.0.0/issue-13888.toml new file mode 100644 index 000000000000..da57770af339 --- /dev/null +++ b/changelog/5.0.0/issue-13888.toml @@ -0,0 +1,7 @@ +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "The pipeline function flatten_json now respects the original JSON types. An optional parameter is provided for backwards compatibility." + +issues = ["13888"] +pulls = ["13947"] diff --git a/changelog/5.0.0/issue-13934.toml b/changelog/5.0.0/issue-13934.toml new file mode 100644 index 000000000000..9a302c34247b --- /dev/null +++ b/changelog/5.0.0/issue-13934.toml @@ -0,0 +1,9 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Remove not needed `rel='noreferer'` from documentation links" + +issues = ["13934"] +pulls = ["13941"] diff --git a/changelog/5.0.0/issue-13973.toml b/changelog/5.0.0/issue-13973.toml new file mode 100644 index 000000000000..dcd7bfdfc396 --- /dev/null +++ b/changelog/5.0.0/issue-13973.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Allow non-word-characters in lookup tables key field" + +issues = ["13973"] +pulls = ["14029"] diff --git a/changelog/5.0.0/issue-13998.toml b/changelog/5.0.0/issue-13998.toml new file mode 100644 index 000000000000..3577ddc1df94 --- /dev/null +++ b/changelog/5.0.0/issue-13998.toml @@ -0,0 +1,12 @@ +type = "fixed" +message = "Fix appearing toast errors when creating HTTP JSONPath data adapter and start editing the title." + +pulls = ["14004"] + +details.user = """ +When a user creates a new adapter and starts to edit the title, +errors were displayed immediately. + +This happened because of the missing URL, now a default URL is used, +so the new adapter is not immediately invalid. 
+""" diff --git a/changelog/5.0.0/issue-3807.toml b/changelog/5.0.0/issue-3807.toml new file mode 100644 index 000000000000..0bc65425e6a5 --- /dev/null +++ b/changelog/5.0.0/issue-3807.toml @@ -0,0 +1,5 @@ +type = "added" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Feature preview of Scripting API." + +issues = ["3807"] +pulls = ["13809"] diff --git a/changelog/5.0.0/issue-5040.toml b/changelog/5.0.0/issue-5040.toml new file mode 100644 index 000000000000..a3dd829bf0d9 --- /dev/null +++ b/changelog/5.0.0/issue-5040.toml @@ -0,0 +1,13 @@ +type = "changed" +message = "Fresh Graylog Installations will have a new default message processor order" + +issues = ["5040"] +pulls = ["13081"] + +contributors = [""] + +details.user = """ +In new Graylog installations, the Message Filter Chain will run before the Pipeline Processor. +This enables the more intuitive stream rules to assign messages to streams, before they +are passed to the pipeline interpreter. +""" diff --git a/changelog/5.0.0/issue-6770.toml b/changelog/5.0.0/issue-6770.toml new file mode 100644 index 000000000000..9c7c9e0ab7f5 --- /dev/null +++ b/changelog/5.0.0/issue-6770.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix alert system on low message input" + +issues = ["6770"] +pulls = ["13556"] diff --git a/changelog/5.0.0/issue-6909.toml b/changelog/5.0.0/issue-6909.toml new file mode 100644 index 000000000000..44a45e56ed9a --- /dev/null +++ b/changelog/5.0.0/issue-6909.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Auto focus first suggestion in search query input autocompletion." + +issues = ["6909"] +pulls = ["12991"] diff --git a/changelog/5.0.0/issue-7212.toml b/changelog/5.0.0/issue-7212.toml new file mode 100644 index 000000000000..ae316150ed2b --- /dev/null +++ b/changelog/5.0.0/issue-7212.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed bug preventing installation of Content Packs that reference System Streams." + +issues = ["7212"] +pulls = ["13398"] diff --git a/changelog/5.0.0/issue-7371.toml b/changelog/5.0.0/issue-7371.toml new file mode 100644 index 000000000000..55e944ef1d0c --- /dev/null +++ b/changelog/5.0.0/issue-7371.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed bug preventing processing of CEF message fields with large integer values." + +issues = ["7371"] +pulls = ["13812"] diff --git a/changelog/5.0.0/issue-7470.toml b/changelog/5.0.0/issue-7470.toml new file mode 100644 index 000000000000..1be4841165ea --- /dev/null +++ b/changelog/5.0.0/issue-7470.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Reimplement ability to reorder dashboard pages." + +issues = ["13744"] +pulls = ["7470"] diff --git a/changelog/5.0.0/issue-7593.toml b/changelog/5.0.0/issue-7593.toml new file mode 100644 index 000000000000..f0189d6b5fee --- /dev/null +++ b/changelog/5.0.0/issue-7593.toml @@ -0,0 +1,6 @@ +type = "changed" +message = "'Outdated version' notifications now include links to the changelog pages." +contributors = ["@supahgreg"] + +issues = ["7593", "7689"] +pulls = ["12648"] diff --git a/changelog/5.0.0/issue-7801.toml b/changelog/5.0.0/issue-7801.toml new file mode 100644 index 000000000000..d3ab8566a58c --- /dev/null +++ b/changelog/5.0.0/issue-7801.toml @@ -0,0 +1,10 @@ +type = "fixed" +message = "Fix content pack imports error for event notifications." 
+ +issues = ["7801"] +pulls = ["13171", "graylog-plugin-enterprise-integrations#859", "graylog-plugin-integrations#1129"] + +details.user = """ +Fixes an error that would occur when attepting to import a content pack containing an event definitions that have +a notification assigned. +""" diff --git a/changelog/5.0.0/issue-8111.toml b/changelog/5.0.0/issue-8111.toml new file mode 100644 index 000000000000..64ff2789bfdc --- /dev/null +++ b/changelog/5.0.0/issue-8111.toml @@ -0,0 +1,10 @@ +type = "changed" +message = "Replacing nested with linear bucketing in aggregations." + +issues = ["8111"] +pulls = ["13805", "13806", "13855"] + +details.user = """ +Previously, grouping over multiple fields in an aggregation was achieved by nesting aggregations. This potentially resulted in unpredictable results and numbers of buckets being generated. +Aggregations do now return linear buckets and allow specifying single limits for row/column pivots. +""" diff --git a/changelog/5.0.0/issue-9933.toml b/changelog/5.0.0/issue-9933.toml new file mode 100644 index 000000000000..539f4f744e85 --- /dev/null +++ b/changelog/5.0.0/issue-9933.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "When selecting roles from the dropdown they are now automatically assigned to the team/user" + +issues = ["graylog2-server#9933", "graylog-plugin-enterprise#3969"] +pulls = ["graylog2-server#13277", "graylog-plugin-enterprise#4002"] diff --git a/changelog/5.0.0/issue-9998.toml b/changelog/5.0.0/issue-9998.toml new file mode 100644 index 000000000000..f20b4a73674e --- /dev/null +++ b/changelog/5.0.0/issue-9998.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Allow sorting columns in data table widget." + +issues = ["13432"] +pulls = ["9998"] diff --git a/changelog/5.0.0/pr-12299.toml b/changelog/5.0.0/pr-12299.toml new file mode 100644 index 000000000000..64acf1f03272 --- /dev/null +++ b/changelog/5.0.0/pr-12299.toml @@ -0,0 +1,14 @@ +type = "added" +message = "Add config option to set upper limit for index retention period." + +pulls = ["12299"] + +contributors = [""] + +details.user = """ +Adds a new config option `max_index_retention_period` to `graylog.conf`. +It is used to limit the effective index retention period that a user may +configure via the UI. + +The limit will only be enforced if a time-based rotation strategy is chosen. +""" diff --git a/changelog/5.0.0/pr-12370.toml b/changelog/5.0.0/pr-12370.toml new file mode 100644 index 000000000000..1e3b3d2e891d --- /dev/null +++ b/changelog/5.0.0/pr-12370.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Reset time-based rotation strategy after leader change." + +pulls = ["12370"] diff --git a/changelog/5.0.0/pr-12387.toml b/changelog/5.0.0/pr-12387.toml new file mode 100644 index 000000000000..d07698f3f86a --- /dev/null +++ b/changelog/5.0.0/pr-12387.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Support configuration of sidecars on Darwin and FreeBSD." + +issues = ["Graylog2/collector-sidecar#377"] +pulls = ["12387"] diff --git a/changelog/5.0.0/pr-12420.toml b/changelog/5.0.0/pr-12420.toml new file mode 100644 index 000000000000..7caccd44a762 --- /dev/null +++ b/changelog/5.0.0/pr-12420.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix parsing of invalid search operators in query validation." 
+ +pulls = ["12420"] diff --git a/changelog/5.0.0/pr-12877.toml b/changelog/5.0.0/pr-12877.toml new file mode 100644 index 000000000000..1d0d4d907659 --- /dev/null +++ b/changelog/5.0.0/pr-12877.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Adding simple actions to copy field name/value/message fields to clipboard." + +pulls = ["12877"] diff --git a/changelog/5.0.0/pr-12987.toml b/changelog/5.0.0/pr-12987.toml new file mode 100644 index 000000000000..5c0eb5aa1989 --- /dev/null +++ b/changelog/5.0.0/pr-12987.toml @@ -0,0 +1,9 @@ +type = "removed" +message = "Removing legacy alerting management pages." + +details.user = """ +A while ago we rewrote the alerting UI. Some routes for the old UI were still accessible. +With this change we are removing the old alerting management pages completely. +""" + +pulls = ["12987"] diff --git a/changelog/5.0.0/pr-13088.toml b/changelog/5.0.0/pr-13088.toml new file mode 100644 index 000000000000..87f738e4b8d2 --- /dev/null +++ b/changelog/5.0.0/pr-13088.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Improve server startup to log all errors to the log file instead of writing some of them to STDERR." + +pulls = ["13088"] diff --git a/changelog/5.0.0/pr-13113.toml b/changelog/5.0.0/pr-13113.toml new file mode 100644 index 000000000000..3af44d26db98 --- /dev/null +++ b/changelog/5.0.0/pr-13113.toml @@ -0,0 +1,15 @@ +type = "changed" +message = "Retry on Elasticsearch Request entity too large errors" + +issues = [""] +pulls = ["13113, 7071"] + +contributors = [""] + +details.user = """ +An elastic cluster might not have enough memory available to +accept a full bulk index request. It can reject it with +a HTTP/1.1 429 Too Many Requests exception. +In this case we retry the request by splitting the message batch +in half, and try again. +""" diff --git a/changelog/5.0.0/pr-13204.toml b/changelog/5.0.0/pr-13204.toml new file mode 100644 index 000000000000..2ecba7c09d85 --- /dev/null +++ b/changelog/5.0.0/pr-13204.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Add ability to pull Geo Location Processor database files from an S3 bucket." + +pulls = ["13204"] diff --git a/changelog/5.0.0/pr-13258.toml b/changelog/5.0.0/pr-13258.toml new file mode 100644 index 000000000000..9edf8e3f57aa --- /dev/null +++ b/changelog/5.0.0/pr-13258.toml @@ -0,0 +1,17 @@ +type = "changed" +message = "Rename 'All Messages' stream to 'Default Stream'" + +issues = [""] +pulls = ["13258"] + +contributors = [""] + +details.user = """ +The "All Messages" Stream has been renamed to "Default Stream" +Note that the name change is only happening on new installations. +""" + +details.ops = """ +If there are any pipeline rules which lookup the default stream by +name ("All Messages"), they will not work on new Graylog installations anymore. +""" diff --git a/changelog/5.0.0/pr-13276.toml b/changelog/5.0.0/pr-13276.toml new file mode 100644 index 000000000000..d109ee3f03e3 --- /dev/null +++ b/changelog/5.0.0/pr-13276.toml @@ -0,0 +1,8 @@ +type = "c" +message = "Java 17" + +pulls = ["13276", "graylog-plugin-enterprise#3961", "graylog-plugin-enterprise-integrations#869", "graylog-plugin-collector#205", "graylog-plugin-aws#663", "graylog-plugin-integrations#1153", "13330", "12644"] + +details.user = """ +Switch from Java 8 to Java 17. 
+""" diff --git a/changelog/5.0.0/pr-13340.toml b/changelog/5.0.0/pr-13340.toml new file mode 100644 index 000000000000..6a639d77ca4f --- /dev/null +++ b/changelog/5.0.0/pr-13340.toml @@ -0,0 +1,8 @@ +type = "c" +message = "OpenSearch 2 Support" + +pulls = ["13340", "graylog-plugin-enterprise#4005"] + +details.user = """ +Initial support for OpenSearch 2 +""" diff --git a/changelog/5.0.0/pr-13410.toml b/changelog/5.0.0/pr-13410.toml new file mode 100644 index 000000000000..93d6c6af1ca0 --- /dev/null +++ b/changelog/5.0.0/pr-13410.toml @@ -0,0 +1,11 @@ +type = "c" +message = "Disable rollup columns by default for new aggregations." + +issues = ["11516"] + +pulls = ["13410", "13690"] + +details.user = """ +For most users, rollup columns are not useful by default. Instead, +they can be useful in select configurations, so they should be opt-in instead of opt-out. +""" diff --git a/changelog/5.0.0/pr-13423.toml b/changelog/5.0.0/pr-13423.toml new file mode 100644 index 000000000000..289bdff1d406 --- /dev/null +++ b/changelog/5.0.0/pr-13423.toml @@ -0,0 +1,7 @@ +type = "added" +message = "Enabling search filter feature (Enterprise)." + +pulls = ["13640"] +details.user = """ +Search filters are a new feature in Graylog that allows creating reusable and shareable query snippets. +""" diff --git a/changelog/5.0.0/pr-13498.toml b/changelog/5.0.0/pr-13498.toml new file mode 100644 index 000000000000..03fe018fbbb4 --- /dev/null +++ b/changelog/5.0.0/pr-13498.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Display current page as active in navigation." + +pulls = ["13498"] diff --git a/changelog/5.0.0/pr-13507.toml b/changelog/5.0.0/pr-13507.toml new file mode 100644 index 000000000000..cde99f7962e9 --- /dev/null +++ b/changelog/5.0.0/pr-13507.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix server --dump-default-config command" + +issues = [""] +pulls = ["13507"] diff --git a/changelog/5.0.0/pr-13510.toml b/changelog/5.0.0/pr-13510.toml new file mode 100644 index 000000000000..248c7c03eca9 --- /dev/null +++ b/changelog/5.0.0/pr-13510.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Allow pinning row pivot columns in data table widget." + +issues = ["Graylog2/graylog-plugin-enterprise#249"] +pulls = ["13510"] diff --git a/changelog/5.0.0/pr-13541.toml b/changelog/5.0.0/pr-13541.toml new file mode 100644 index 000000000000..e32f20991816 --- /dev/null +++ b/changelog/5.0.0/pr-13541.toml @@ -0,0 +1,5 @@ +type = "s" +message = "Update Jackson to latest stable version 2.13.4" + +issues = ["Graylog2/graylog-plugin-enterprise#3674"] +pulls = ["13541"] diff --git a/changelog/5.0.0/pr-13553.toml b/changelog/5.0.0/pr-13553.toml new file mode 100644 index 000000000000..b3b2da84b54b --- /dev/null +++ b/changelog/5.0.0/pr-13553.toml @@ -0,0 +1,5 @@ +type = "r" +message = "Remove unused and dysfunctional `MetricsHistoryResource`." + +issues = ["2443"] +pulls = ["13553"] diff --git a/changelog/5.0.0/pr-13577.toml b/changelog/5.0.0/pr-13577.toml new file mode 100644 index 000000000000..8a9c189074f2 --- /dev/null +++ b/changelog/5.0.0/pr-13577.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Unify position of 'create new entity' buttons in page header." + +pulls = ["13577"] diff --git a/changelog/5.0.0/pr-13581.toml b/changelog/5.0.0/pr-13581.toml new file mode 100644 index 000000000000..853aaf3401ad --- /dev/null +++ b/changelog/5.0.0/pr-13581.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Move submit button in widget edit mode next to configuration form." 
+ +pulls = ["13581"] diff --git a/changelog/5.0.0/pr-13643.toml b/changelog/5.0.0/pr-13643.toml new file mode 100644 index 000000000000..82377a6e8b3c --- /dev/null +++ b/changelog/5.0.0/pr-13643.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Show in field select if fields are qualified for metric function." + +pulls = ["13643"] diff --git a/changelog/5.0.0/pr-13677.toml b/changelog/5.0.0/pr-13677.toml new file mode 100644 index 000000000000..9fd2d51b20a5 --- /dev/null +++ b/changelog/5.0.0/pr-13677.toml @@ -0,0 +1,9 @@ +type = "changed" +message = "Improve the way we display the navigation for subareas of a page." + +details.user = """ +Unifies the way we display page navigations by displaying tabs below the main navigation. +Before, these links were displayed as buttons in the right top corner of the page header. +""" + +pulls = ["13677"] diff --git a/changelog/5.0.0/pr-13691.toml b/changelog/5.0.0/pr-13691.toml new file mode 100644 index 000000000000..6d616d8e42f6 --- /dev/null +++ b/changelog/5.0.0/pr-13691.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Unify position of documentation links in page headers." + +pulls = ["13691"] diff --git a/changelog/5.0.0/pr-13710.toml b/changelog/5.0.0/pr-13710.toml new file mode 100644 index 000000000000..14bc87c2dc67 --- /dev/null +++ b/changelog/5.0.0/pr-13710.toml @@ -0,0 +1,12 @@ +type = "changed" +message = "Update jersey from 2.32 to 2.37" + +issues = [""] +pulls = ["13710"] + +contributors = [""] + +details.user = """ +This is needed for Java 17 support: +https://github.com/eclipse-ee4j/jersey/pull/4754 +""" diff --git a/changelog/5.0.0/pr-13736.toml b/changelog/5.0.0/pr-13736.toml new file mode 100644 index 000000000000..f8fce9e16a09 --- /dev/null +++ b/changelog/5.0.0/pr-13736.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Allow setting TCP-keepalive for all sockets used in HTTP notifications." + +pulls = ["13736"] diff --git a/changelog/5.0.0/pr-13782.toml b/changelog/5.0.0/pr-13782.toml new file mode 100644 index 000000000000..90f887ccc2e2 --- /dev/null +++ b/changelog/5.0.0/pr-13782.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Allow \"Show Top Values\" action for enumerable compound fields." + +pulls = ["137892"] diff --git a/changelog/5.0.0/pr-13817.toml b/changelog/5.0.0/pr-13817.toml new file mode 100644 index 000000000000..42bad900c1e8 --- /dev/null +++ b/changelog/5.0.0/pr-13817.toml @@ -0,0 +1,10 @@ +type = "fixed" +message = "Filter autocompletion suggestions to currently selected streams." + +pulls = ["13553"] + +details.user = """ +In previous versions, when the currently selected streams share their index sets with other, inaccessible streams, autocompletion +suggestions could potentially contain values from documents which are not readable by the current user. This is fixed by +adding a filter to the request which is limiting values to the currently selected streams. +""" diff --git a/changelog/5.0.0/pr-13873.toml b/changelog/5.0.0/pr-13873.toml new file mode 100644 index 000000000000..a2f3a27487a8 --- /dev/null +++ b/changelog/5.0.0/pr-13873.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Mask custom properties for Kafka inputs on the inputs page to hide sensitive information." 
+ +issues = ["13383"] +pulls = ["13873"] diff --git a/changelog/5.0.0/pr-13879.toml b/changelog/5.0.0/pr-13879.toml new file mode 100644 index 000000000000..58aace1b2e11 --- /dev/null +++ b/changelog/5.0.0/pr-13879.toml @@ -0,0 +1,17 @@ +type = "security" +message = "Update Netty from 4.1.60 to 4.1.84" + +pulls = ["13879"] + +details.user = """ +Updating the Netty library from 4.1.60 to 4.1.84 +includes plenty of fixes. +Please refer to https://netty.io/news for all the details. +Some of those fixes, adress security issues. +Here's a list of all fixed CVEs: + - CVE-2021-21409 - request smuggling + - CVE-2021-37136 - Bzip2Decoder doesn't allow setting size restrictions for decompressed data + - CVE-2021-37137 - SnappyFrameDecoder doesn't restrict chunk length any may buffer skippable chunks in an unnecessary way + - CVE-2021-21409 - Validate Content-Length header in HTTP/2 decoder + - CVE-2021-43797 - HTTP fails to validate against control chars in header names which may lead to HTTP request smuggling +""" diff --git a/changelog/5.0.0/pr-13943.toml b/changelog/5.0.0/pr-13943.toml new file mode 100644 index 000000000000..22d00fe0c4be --- /dev/null +++ b/changelog/5.0.0/pr-13943.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Change default of search window and execution frequency for new event configurations from 1 to 5 minutes." + +issues = ["Graylog2/graylog-plugin-enterprise#3740"] +pulls = ["13943", "Graylog2/graylog-plugin-enterprise#4343"] diff --git a/changelog/5.0.0/pr-13955.toml b/changelog/5.0.0/pr-13955.toml new file mode 100644 index 000000000000..03273bf5e07f --- /dev/null +++ b/changelog/5.0.0/pr-13955.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Isolate errors in result extraction to search type(s) causing it" + +pulls = ["13955"] diff --git a/changelog/5.0.0/pr-13960.toml b/changelog/5.0.0/pr-13960.toml new file mode 100644 index 000000000000..ede5a3954aa5 --- /dev/null +++ b/changelog/5.0.0/pr-13960.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Change default value for `http_thread_pool_size` and `proxied_requests_thread_pool_size` to 64." + +issues = ["Graylog2/graylog-plugin-enterprise#4325"] +pulls = ["13960"] diff --git a/changelog/5.0.0/pr-13971.toml b/changelog/5.0.0/pr-13971.toml new file mode 100644 index 000000000000..bb4f989f1d6f --- /dev/null +++ b/changelog/5.0.0/pr-13971.toml @@ -0,0 +1,11 @@ +type = "removed" +message = "Remove ineffective settings for output buffer processor thread pool." + +pulls = ["13971"] + +details.user = """ +Removed the following obsolete settings because they did not have the suggested effect: + +- `outputbuffer_processor_threads_max_pool_size` +- `outputbuffer_processor_keep_alive_time` +""" diff --git a/changelog/5.0.0/pr-13980.toml b/changelog/5.0.0/pr-13980.toml new file mode 100644 index 000000000000..60c59b343397 --- /dev/null +++ b/changelog/5.0.0/pr-13980.toml @@ -0,0 +1,4 @@ +type = "add" +message = "Update `bin/graylogctl` to support a bundled JVM." + +pulls = ["13980"] diff --git a/changelog/5.0.0/pr-14017.toml b/changelog/5.0.0/pr-14017.toml new file mode 100644 index 000000000000..d6d69fc906e7 --- /dev/null +++ b/changelog/5.0.0/pr-14017.toml @@ -0,0 +1,16 @@ +type = "changed" +message = "Sorting options in aggregations have been limited for deterministic results." + +issues = ["13957"] +pulls = ["14017"] + +details.user = """ +Unfortunately, the new bucketing strategy produces better results, but does not support all combinations of multi-sorts. 
+In order to only support combinations which provide deterministic results, sorting for new widgets has been limited to: + + - Only one sort if a grouping field is used + - One or more metrics/series + +No combinations are allowed. Old widgets are still supported, but may not always return the expected results. Due to the +shortcomings of the old bucketing strategy, this might have already been the case. +""" diff --git a/integration-tests/src/test/resources/integration/seeds/mongodb/graylog/index_failures.bson b/changelog/5.1.0-rc.1/.gitkeep similarity index 100% rename from integration-tests/src/test/resources/integration/seeds/mongodb/graylog/index_failures.bson rename to changelog/5.1.0-rc.1/.gitkeep diff --git a/changelog/5.1.0-rc.1/issue-1065.toml b/changelog/5.1.0-rc.1/issue-1065.toml new file mode 100644 index 000000000000..796f663649d9 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-1065.toml @@ -0,0 +1,11 @@ +type = "fixed" +message = "Fix Maxmind database types in cloud environment." + +pulls = ["13911"] + +details.user = """ +We need to reduce the available database types for Maxmind +in the cloud environment and no longer allow database path customization. + +This is because the user has no access to the file system in the cloud environment. +""" diff --git a/changelog/5.1.0-rc.1/issue-11131.toml b/changelog/5.1.0-rc.1/issue-11131.toml new file mode 100644 index 000000000000..1f0056e70262 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-11131.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Provide ability to configure an email attribute for LDAP." + +issues = ["11131"] +pulls = ["14041"] diff --git a/changelog/5.1.0-rc.1/issue-11183.toml b/changelog/5.1.0-rc.1/issue-11183.toml new file mode 100644 index 000000000000..2d31575b1ccb --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-11183.toml @@ -0,0 +1,8 @@ +type = "fixed" +message = "Properly escape event content for HTML email notifications" + +issues = ["11183"] +pulls = ["14286"] + +contributors = ["zhu"] + diff --git a/changelog/5.1.0-rc.1/issue-11342.toml b/changelog/5.1.0-rc.1/issue-11342.toml new file mode 100644 index 000000000000..986a500a9ced --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-11342.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Improved validation of grok pattern definition." + +issues = ["11342"] +pulls = ["14853"] diff --git a/changelog/5.1.0-rc.1/issue-11379.toml b/changelog/5.1.0-rc.1/issue-11379.toml new file mode 100644 index 000000000000..63ee0447a1f4 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-11379.toml @@ -0,0 +1,10 @@ +type = "added" +message = "Add global session timeout configuration." + +issues = ["11379"] +pulls = ["14343"] + +details.user = """ +Introducing a cluster-global configuration for user session timeouts. +When enabled, it overrides any per-user timeout settings. +""" diff --git a/changelog/5.1.0-rc.1/issue-11689.toml b/changelog/5.1.0-rc.1/issue-11689.toml new file mode 100644 index 000000000000..2ec8e3dc0bdb --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-11689.toml @@ -0,0 +1,8 @@ +type = "fixed" +message = "Fix count-based index rotation on idle indices."
+ +issues = ["11689"] +pulls = ["14512"] + +contributors = [""] + diff --git a/changelog/5.1.0-rc.1/issue-12555.toml b/changelog/5.1.0-rc.1/issue-12555.toml new file mode 100644 index 000000000000..f3151d66eae4 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-12555.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Generate an event in the `System Event` stream for every system notification" +details.user = "Any of the existing event notification types can thus be applied to system notifictions. E.g. to send an email for certain types of notifications." +issues = ["12555", "14345"] +pulls = ["13833"] diff --git a/changelog/5.1.0-rc.1/issue-12857.toml b/changelog/5.1.0-rc.1/issue-12857.toml new file mode 100644 index 000000000000..84a88c66cee0 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-12857.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Trim whitespace characters on stream names." + +pulls = ["14359"] diff --git a/changelog/5.1.0-rc.1/issue-12959.toml b/changelog/5.1.0-rc.1/issue-12959.toml new file mode 100644 index 000000000000..ab6b640774bd --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-12959.toml @@ -0,0 +1,7 @@ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "added" +message = "Add support to skip TLS verification on HTTP event notifications." + +issues = ["12959"] +pulls = [""] + diff --git a/changelog/5.1.0-rc.1/issue-12963.toml b/changelog/5.1.0-rc.1/issue-12963.toml new file mode 100644 index 000000000000..7832e2167aa3 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-12963.toml @@ -0,0 +1,7 @@ +type = "a" +message = """ +Adds a unique `username` index to the `user` collection to prevent creation of duplicate users. +Any existing duplicates are resolved by appending the user ID. +""" +issues = ["12963"] +pulls = ["14649"] diff --git a/changelog/5.1.0-rc.1/issue-12996.toml b/changelog/5.1.0-rc.1/issue-12996.toml new file mode 100644 index 000000000000..495fe3654b4f --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-12996.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixes `too_long_http_line_exception`/`HTTP/1.1 413 FULL head` problem when Graylog Event Processor operates on huge number of indices" + +issues = ["12996","13675"] +pulls = ["13790","graylog-plugin-enterprise#4252","13718","13722", "graylog-plugin-enterprise#4213"] diff --git a/changelog/5.1.0-rc.1/issue-13023.toml b/changelog/5.1.0-rc.1/issue-13023.toml new file mode 100644 index 000000000000..fa258b2c0719 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13023.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Support filter queries when listing authentication backends. We were ignoring them." + +issues = ["13023"] +pulls = ["14270"] diff --git a/changelog/5.1.0-rc.1/issue-13221.toml b/changelog/5.1.0-rc.1/issue-13221.toml new file mode 100644 index 000000000000..a5c0fd38d777 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13221.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixes parsing, so that ES/OS errors without index data are handled properly" + +issues = ["13221"] +pulls = ["14203"] diff --git a/changelog/5.1.0-rc.1/issue-13596.toml b/changelog/5.1.0-rc.1/issue-13596.toml new file mode 100644 index 000000000000..ae8826b3bf20 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13596.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Wrapping `latest` metric with filter clause for existence of field." 
+ +issues = ["13596"] +pulls = ["14815"] diff --git a/changelog/5.1.0-rc.1/issue-13628.toml b/changelog/5.1.0-rc.1/issue-13628.toml new file mode 100644 index 000000000000..811f2070f253 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13628.toml @@ -0,0 +1,5 @@ +type = "changed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Consistent use of message identifiers in strings. Message id visible in message list." + +issues = ["13628"] +pulls = ["14562"] diff --git a/changelog/5.1.0-rc.1/issue-13825.toml b/changelog/5.1.0-rc.1/issue-13825.toml new file mode 100644 index 000000000000..de5ab37bd29a --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13825.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Allow creating notification in event definition wizard when no notification exists" + +issues = ["13825"] +pulls = ["15286"] diff --git a/changelog/5.1.0-rc.1/issue-13852.toml b/changelog/5.1.0-rc.1/issue-13852.toml new file mode 100644 index 000000000000..723dc281125a --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13852.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implement option to filter streams overview based on stream status." + +issues = ["13852"] +pulls = ["14640", "14690"] diff --git a/changelog/5.1.0-rc.1/issue-13872.toml b/changelog/5.1.0-rc.1/issue-13872.toml new file mode 100644 index 000000000000..df128c835235 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13872.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix time-based rotation check when closed indices are present." + +issues = ["13872"] +pulls = ["15326"] diff --git a/changelog/5.1.0-rc.1/issue-13874.toml b/changelog/5.1.0-rc.1/issue-13874.toml new file mode 100644 index 000000000000..0b1397036922 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13874.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Clone Action style on Collectors configurations list" + +issues = ["13874"] +pulls = ["13899"] diff --git a/changelog/5.1.0-rc.1/issue-13930.toml b/changelog/5.1.0-rc.1/issue-13930.toml new file mode 100644 index 000000000000..d53e73a186bc --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13930.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Display streams overview as a table to improve usability." + +issues = ["13887"] +pulls = ["13930"] diff --git a/changelog/5.1.0-rc.1/issue-13982.toml b/changelog/5.1.0-rc.1/issue-13982.toml new file mode 100644 index 000000000000..2598b3890c73 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-13982.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Standardize sort icons for data table and message list." + +issues = ["13982"] +pulls = ["13987"] diff --git a/changelog/5.1.0-rc.1/issue-14074.toml b/changelog/5.1.0-rc.1/issue-14074.toml new file mode 100644 index 000000000000..8c3fb772732e --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14074.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Start `JSON path value from HTTP API` input on leader node only, if `Global` option was selected in input configuration." + +issues = ["14074"] diff --git a/changelog/5.1.0-rc.1/issue-14086.toml b/changelog/5.1.0-rc.1/issue-14086.toml new file mode 100644 index 000000000000..00ce70bcf42e --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14086.toml @@ -0,0 +1,15 @@ +type = "c" +message = "Changed bulk indexing retry failure log-level from error to warning." + +issues = ["14086"] +pulls = ["14088"] + +details.user = """ +The log-level for OpenSearch/Elasticsearch bulk indexing retry attempts has been changed from `ERROR` to `WARN`. 
+ +While bulk indexing retries might indicate an issue with the Opensearch backend, it also might be a temporary condition +that would resolve on its own (for example, temporary high memory pressure). Warn is a better log-level for such a case. + +Example message: +> WARN Caught exception during bulk indexing: [specific error], retrying (attempt #1). +""" diff --git a/changelog/5.1.0-rc.1/issue-14140.toml b/changelog/5.1.0-rc.1/issue-14140.toml new file mode 100644 index 000000000000..3d8f4dfe234c --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14140.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Saved searches can be now sorted by 3 additional fields: owner, description and summary." + +issues = ["14140"] +pulls = ["14355"] diff --git a/changelog/5.1.0-rc.1/issue-14142.toml b/changelog/5.1.0-rc.1/issue-14142.toml new file mode 100644 index 000000000000..43f4e89e9be9 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14142.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix CIDR parsing of IP in query validation" + +issues = ["14142"] +pulls = ["14204"] diff --git a/changelog/5.1.0-rc.1/issue-14153.toml b/changelog/5.1.0-rc.1/issue-14153.toml new file mode 100644 index 000000000000..481ebda9f1f7 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14153.toml @@ -0,0 +1,8 @@ +type = "fixed" +message = "Improve JVM security provider compatibility by switching to a widely supported cipher transformation" + +issues = ["14153"] +pulls = ["14193"] + +contributors = [""] + diff --git a/changelog/5.1.0-rc.1/issue-14164.toml b/changelog/5.1.0-rc.1/issue-14164.toml new file mode 100644 index 000000000000..1d07e2e85a4f --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14164.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Avoid `GET` APIs for cluster/deflector health to avoid overly long requests." + +issues = ["14164"] +pulls = ["14177"] diff --git a/changelog/5.1.0-rc.1/issue-14214.toml b/changelog/5.1.0-rc.1/issue-14214.toml new file mode 100644 index 000000000000..082c71b8e60f --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14214.toml @@ -0,0 +1,13 @@ +type = "fixed" +message = "Provide query suggestions for numerical fields." + +pulls = ["14279"] + +details.user = """ +Previous implementation used prefix query, which is only usable for textual fields. This fix adds a scripted query +that can handle suggestions for numerical fields as well. + +Additionally, the timerange is now applied for suggestions as well, limiting the suggestions to requested time range. +This should narrow down suggestions and significantly improve response times of suggestions. 
+ +""" diff --git a/changelog/5.1.0-rc.1/issue-14229.toml b/changelog/5.1.0-rc.1/issue-14229.toml new file mode 100644 index 000000000000..409f0d3acef5 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14229.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "added an info box for auto assigned configs using tags" + +issues = ["14229"] +pulls = ["14565"] diff --git a/changelog/5.1.0-rc.1/issue-14234.toml b/changelog/5.1.0-rc.1/issue-14234.toml new file mode 100644 index 000000000000..f4d4a102f117 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14234.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "LUT - Show no results message on empty search return and fix delete cache functionality" + +issues = ["14234", "14238"] + +contributors = [""] diff --git a/changelog/5.1.0-rc.1/issue-14254.toml b/changelog/5.1.0-rc.1/issue-14254.toml new file mode 100644 index 000000000000..d84b2a57d5fa --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14254.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add support for a reply-to address in email notifications." + +issues = ["14254"] +pulls = ["14337"] diff --git a/changelog/5.1.0-rc.1/issue-14268.toml b/changelog/5.1.0-rc.1/issue-14268.toml new file mode 100644 index 000000000000..a2561943d3c6 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14268.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix concatenation of query strings in export." + +issues = ["14268"] +pulls = ["14284"] diff --git a/changelog/5.1.0-rc.1/issue-14280.toml b/changelog/5.1.0-rc.1/issue-14280.toml new file mode 100644 index 000000000000..4ce1c46326c3 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14280.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Sorting of indices inside an index set is based on their numbers, descending." + +issues = ["14280"] +pulls = ["14339"] diff --git a/changelog/5.1.0-rc.1/issue-14285.toml b/changelog/5.1.0-rc.1/issue-14285.toml new file mode 100644 index 000000000000..98503e186fee --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14285.toml @@ -0,0 +1,5 @@ +type = "c" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Changed event definitions and notifications to new paginated list and entity list ui component. This enables bulk deletion of event definitions and notifications and bulk enabling/disabling event definitions." + +issues = ["14285"] +pulls = ["14502"] diff --git a/changelog/5.1.0-rc.1/issue-14302.toml b/changelog/5.1.0-rc.1/issue-14302.toml new file mode 100644 index 000000000000..aa927432ec33 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14302.toml @@ -0,0 +1,11 @@ +type = "c" +message = "Prevent deletion of event definitions that are still referenced in other definitions." + +issues = ["14302"] +pulls = ["#14792", "Graylog2/graylog-plugin-enterprise#4765"] + +details.user = """ +An event definition d1 may be referenced from another definition d2, specifically a correlation event. +Deleting d1 at this time results in unexpected behavior. We now block deletion and show a warning, +displaying the list of dependent events from which the definition must be removed, prior to deletion. 
+""" diff --git a/changelog/5.1.0-rc.1/issue-14318.toml b/changelog/5.1.0-rc.1/issue-14318.toml new file mode 100644 index 000000000000..32af6d542332 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14318.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = "Fix filtering and loader on the Lookup Table pages" + +issues = ["14318"] +pulls = ["14405"] + + diff --git a/changelog/5.1.0-rc.1/issue-14404.toml b/changelog/5.1.0-rc.1/issue-14404.toml new file mode 100644 index 000000000000..9c61e7be5b67 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14404.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix grouping in aggregation builder for fields which only exist in the All Events, All System Events or Processing and Indexing Failures stream." + +issues = ["14387"] +pulls = ["14404"] diff --git a/changelog/5.1.0-rc.1/issue-14428.toml b/changelog/5.1.0-rc.1/issue-14428.toml new file mode 100644 index 000000000000..d36326fb7574 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14428.toml @@ -0,0 +1,16 @@ +type = "changed" +message = "Disable two TLS ciphers that are considered weak." + +issues = ["14428"] +pulls = ["14592"] + +contributors = [""] + +details.user = """ +This change removes these two weak ciphers from our default configuration: + + https://ciphersuite.info/cs/TLS_RSA_WITH_AES_128_GCM_SHA256/ + https://ciphersuite.info/cs/TLS_RSA_WITH_AES_256_GCM_SHA384/ + +If you need them, add TLSv1.1 to the `enabled_tls_protocols` setting. +""" diff --git a/changelog/5.1.0-rc.1/issue-14433.toml b/changelog/5.1.0-rc.1/issue-14433.toml new file mode 100644 index 000000000000..e71ac352e092 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14433.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add support for encrypted storage of secret input configuration parameters." + +issues = ["14433"] +pulls = ["14459"] diff --git a/changelog/5.1.0-rc.1/issue-14540.toml b/changelog/5.1.0-rc.1/issue-14540.toml new file mode 100644 index 000000000000..e101eb61da37 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14540.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add replay search pages for events, alerts and event definitions" + +issues = ["14540"] +pulls = ["14657"] diff --git a/changelog/5.1.0-rc.1/issue-14544.toml b/changelog/5.1.0-rc.1/issue-14544.toml new file mode 100644 index 000000000000..f149e7eb4ebb --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14544.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add functionality which allows user create event definition from values" + +issues = ["14544"] +pulls = ["15054"] diff --git a/changelog/5.1.0-rc.1/issue-14668.toml b/changelog/5.1.0-rc.1/issue-14668.toml new file mode 100644 index 000000000000..faea84200f64 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14668.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Adds a welcome page link in the menu if a user has a start page set." + +issues = ["14668"] +pulls = ["14736"] diff --git a/changelog/5.1.0-rc.1/issue-14669.toml b/changelog/5.1.0-rc.1/issue-14669.toml new file mode 100644 index 000000000000..08a205892070 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14669.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Adds the default search page as an option to the start pages in the user's profile." 
+ +issues = ["14669"] +pulls = ["14739"] diff --git a/changelog/5.1.0-rc.1/issue-14693.toml b/changelog/5.1.0-rc.1/issue-14693.toml new file mode 100644 index 000000000000..2f5ca60b21bb --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14693.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = "Fix extractors reordering issues" + +issues = ["14693"] +pulls = ["14757"] + + diff --git a/changelog/5.1.0-rc.1/issue-14735.toml b/changelog/5.1.0-rc.1/issue-14735.toml new file mode 100644 index 000000000000..75bb0c99f51d --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14735.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Instead of showing all configurations on one page, they are now separated into sections with a sidebar navigation." + +issues = ["14735"] +pulls = ["15025"] diff --git a/changelog/5.1.0-rc.1/issue-14746.toml b/changelog/5.1.0-rc.1/issue-14746.toml new file mode 100644 index 000000000000..d33d1a85135f --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14746.toml @@ -0,0 +1,10 @@ +type = "c" +message = "Generate a system notification when an aggregation search errors out unexpectedly." + +details.user = """ +An event definition with aggregation will fail silently (with just a log message) when encountering an ES/OS error. +We now also generate a notification to alert the admin to the problem. +""" + +issues = ["14746"] +pulls = ["14967"] diff --git a/changelog/5.1.0-rc.1/issue-14767.toml b/changelog/5.1.0-rc.1/issue-14767.toml new file mode 100644 index 000000000000..78811bbbefd2 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14767.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added an \"Undo Reset\" button to fields in input configuration forms." + +issues = ["14767"] +pulls = ["15147", "graylog-plugin-enterprise#4964"] diff --git a/changelog/5.1.0-rc.1/issue-14787.toml b/changelog/5.1.0-rc.1/issue-14787.toml new file mode 100644 index 000000000000..93dfc89170fc --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14787.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix the way we display long field names in aggregation grouping configuration." + +issues = ["14787"] +pulls = ["14789"] diff --git a/changelog/5.1.0-rc.1/issue-14826.toml b/changelog/5.1.0-rc.1/issue-14826.toml new file mode 100644 index 000000000000..7d8bec9339d4 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14826.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Store selected filters and search query on streams and dashboards overview as URL query params." + +issues = ["14826"] +pulls = ["15011"] diff --git a/changelog/5.1.0-rc.1/issue-14827.toml b/changelog/5.1.0-rc.1/issue-14827.toml new file mode 100644 index 000000000000..125b39d5ad7d --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14827.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implement index set filter for streams overview." + +issues = ["14827"] +pulls = ["14905"] diff --git a/changelog/5.1.0-rc.1/issue-14828.toml b/changelog/5.1.0-rc.1/issue-14828.toml new file mode 100644 index 000000000000..ca7d4fe70348 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14828.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implement creation date filter for streams overview." + +issues = ["14828"] +pulls = ["14966"] diff --git a/changelog/5.1.0-rc.1/issue-14881.toml b/changelog/5.1.0-rc.1/issue-14881.toml new file mode 100644 index 000000000000..cc4d7579e9a4 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14881.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implement bulk start and stop action for streams overview."
+ +issues = ["14881"] +pulls = ["14882", '14883'] diff --git a/changelog/5.1.0-rc.1/issue-14885.toml b/changelog/5.1.0-rc.1/issue-14885.toml new file mode 100644 index 000000000000..e3e77ad8abf0 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14885.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Combine dashboard and widget query when replaying search for dashboard widget." + +issues = ["14885"] +pulls = ["15285"] diff --git a/changelog/5.1.0-rc.1/issue-14891.toml b/changelog/5.1.0-rc.1/issue-14891.toml new file mode 100644 index 000000000000..f77b9cf2fe39 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14891.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Introduce ability to simulate a single pipeline rule in isolation." + +issues = ["14891"] +pulls = ["14892"] diff --git a/changelog/5.1.0-rc.1/issue-14900.toml b/changelog/5.1.0-rc.1/issue-14900.toml new file mode 100644 index 000000000000..22876632b8d8 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14900.toml @@ -0,0 +1,8 @@ +type = "added" +message = "Add telemetry for generic usage metric collection" + +issues = ["14900"] +pulls = ["14979"] + + + diff --git a/changelog/5.1.0-rc.1/issue-14907.toml b/changelog/5.1.0-rc.1/issue-14907.toml new file mode 100644 index 000000000000..c49e200dbe69 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14907.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix overflow problem with grouping section in aggregation builder." + +issues = ["14907"] +pulls = ["14934"] diff --git a/changelog/5.1.0-rc.1/issue-14939.toml b/changelog/5.1.0-rc.1/issue-14939.toml new file mode 100644 index 000000000000..9fdd19887fe6 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14939.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix sidecar process button position issue on safari" + +issues = ["14939"] +pulls = ["15293"] diff --git a/changelog/5.1.0-rc.1/issue-14940.toml b/changelog/5.1.0-rc.1/issue-14940.toml new file mode 100644 index 000000000000..06c3c7552801 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14940.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix visibility of events and notification definition pages." + +issues = ["14940"] +pulls = ["15052"] diff --git a/changelog/5.1.0-rc.1/issue-14952.toml b/changelog/5.1.0-rc.1/issue-14952.toml new file mode 100644 index 000000000000..d30ee2330e52 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-14952.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed bug causing input status data not to be removed when an input was deleted." + +issues = ["14952"] +pulls = ["14954","Graylog2/forwarder#99"] diff --git a/changelog/5.1.0-rc.1/issue-15012.toml b/changelog/5.1.0-rc.1/issue-15012.toml new file mode 100644 index 000000000000..95c1046a869b --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15012.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Improve performance by avoiding unnecessary attempts to create index for access tokens." + +issues = ["15012"] +pulls = ["15097"] diff --git a/changelog/5.1.0-rc.1/issue-15013.toml b/changelog/5.1.0-rc.1/issue-15013.toml new file mode 100644 index 000000000000..cda4c3ce9041 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15013.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Avoid excessive DB requests for maintaining token last access time." 
+ +issues = ["15013"] +pulls = [""] diff --git a/changelog/5.1.0-rc.1/issue-15030.toml b/changelog/5.1.0-rc.1/issue-15030.toml new file mode 100644 index 000000000000..87747f35c063 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15030.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Adds the (previously always missing) timezone to the export command, if set in the REST call." + +issues = ["15030"] +pulls = ["15296"] diff --git a/changelog/5.1.0-rc.1/issue-15073.toml b/changelog/5.1.0-rc.1/issue-15073.toml new file mode 100644 index 000000000000..350b7b56648c --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15073.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix overlay problem with data table header and dashboard tabs dropdown." + +issues = ["15073"] +pulls = ["15091"] diff --git a/changelog/5.1.0-rc.1/issue-15129.toml b/changelog/5.1.0-rc.1/issue-15129.toml new file mode 100644 index 000000000000..8399bd7fe7c8 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15129.toml @@ -0,0 +1,10 @@ +type = "fixed" +message = "Fix NodeService#allActive() implementation, returning only nodes of the correct type" + +issues = ["15129"] +pulls = ["15139"] + +details.user = """ +With the addition of the DATANODE type to the NodeService, the allActive method would also return datanodes. This +may lead to problems in calls that expect only the SERVER type. +""" diff --git a/changelog/5.1.0-rc.1/issue-15209.toml b/changelog/5.1.0-rc.1/issue-15209.toml new file mode 100644 index 000000000000..30c308db68be --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15209.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix being unable to add a custom field when creating an event definition" + +issues = ["15209"] +pulls = ["15226"] diff --git a/changelog/5.1.0-rc.1/issue-15238.toml b/changelog/5.1.0-rc.1/issue-15238.toml new file mode 100644 index 000000000000..82ee7e15d909 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15238.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Include URLs for authentication service(s) in the CSP header connect-src." + +issues = ["15238"] +pulls = ["15283", "Graylog2/graylog-plugin-enterprise#5053"] diff --git a/changelog/5.1.0-rc.1/issue-15244.toml b/changelog/5.1.0-rc.1/issue-15244.toml new file mode 100644 index 000000000000..5f2f386d30e2 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15244.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix show telemetry settings for local admin." + +issues = ["15244"] +pulls = ["15247"] diff --git a/changelog/5.1.0-rc.1/issue-15277.toml b/changelog/5.1.0-rc.1/issue-15277.toml new file mode 100644 index 000000000000..18d1fcbce633 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-15277.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix ConcurrentModificationException when listing input states." + +issues = ["15277"] diff --git a/changelog/5.1.0-rc.1/issue-3741.toml b/changelog/5.1.0-rc.1/issue-3741.toml new file mode 100644 index 000000000000..b5a07f03fac1 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-3741.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = 'Improve position, visibility and default of the "Grace Period" option when creating an Alert.'
+ +issues = ["3741"] +pulls = ["3741"] + + diff --git a/changelog/5.1.0-rc.1/issue-3853.toml b/changelog/5.1.0-rc.1/issue-3853.toml new file mode 100644 index 000000000000..2d9770202a47 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-3853.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added a configurable default timezone to syslog inputs and parse the tz field from FortiGate messages" + +issues = ["3853"] +pulls = ["14737", "graylog-labs/syslog4j-graylog2#41", "Graylog2/graylog-plugin-integrations#1332" ] diff --git a/changelog/5.1.0-rc.1/issue-4774.toml b/changelog/5.1.0-rc.1/issue-4774.toml new file mode 100644 index 000000000000..197c450b43e2 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-4774.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix show Index Failure section on System Overview for Cloud." + +issues = ["4774"] +pulls = ["15398"] + diff --git a/changelog/5.1.0-rc.1/issue-4889.toml b/changelog/5.1.0-rc.1/issue-4889.toml new file mode 100644 index 000000000000..c220a7f0e8f8 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-4889.toml @@ -0,0 +1,7 @@ +type = "c" +message = "GL will no longer perform a temporary redirect based on the Host header." + +issues = ["Graylog2/graylog-plugin-enterprise#4889"] +pulls = ["15067"] + + diff --git a/changelog/5.1.0-rc.1/issue-5653.toml b/changelog/5.1.0-rc.1/issue-5653.toml new file mode 100644 index 000000000000..4b3dcd14328e --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-5653.toml @@ -0,0 +1,5 @@ +type = "a" +message = 'Allow pipeline function "remove_field" to take a regex pattern instead of a field name.' + +issues = ["5653"] +pulls = ["15131"] diff --git a/changelog/5.1.0-rc.1/issue-6486.toml b/changelog/5.1.0-rc.1/issue-6486.toml new file mode 100644 index 000000000000..3cba6b6fab7e --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-6486.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixes to_date() pipeline conversion function to honor an optional timezone argument." + +issues = ["6486"] +pulls = ["14252"] diff --git a/changelog/5.1.0-rc.1/issue-6527.toml b/changelog/5.1.0-rc.1/issue-6527.toml new file mode 100644 index 000000000000..3fed9a6192a0 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-6527.toml @@ -0,0 +1,5 @@ +type = "a" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Add `normalize_fields` pipeline rule function for normalizing message field names." + +issues = ["6527"] +pulls = ["14636"] diff --git a/changelog/5.1.0-rc.1/issue-6867.toml b/changelog/5.1.0-rc.1/issue-6867.toml new file mode 100644 index 000000000000..7994d7f57661 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-6867.toml @@ -0,0 +1,5 @@ +type = "f" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Only execute widgets of the current page on dashboards." + +issues = ["6867"] +pulls = ["14890"] diff --git a/changelog/5.1.0-rc.1/issue-8506.toml b/changelog/5.1.0-rc.1/issue-8506.toml new file mode 100644 index 000000000000..39273ba977cc --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-8506.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Implement option to copy a dashboard page to another dashboard." + +pulls = ["14348"] +issues = ["8506"] diff --git a/changelog/5.1.0-rc.1/issue-8592.toml b/changelog/5.1.0-rc.1/issue-8592.toml new file mode 100644 index 000000000000..928b23827072 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-8592.toml @@ -0,0 +1,5 @@ +type = "removed" +message = "Removed garbage collection warnings."
+ +issues = ["8592"] +pulls = ["14298"] diff --git a/changelog/5.1.0-rc.1/issue-9173.toml b/changelog/5.1.0-rc.1/issue-9173.toml new file mode 100644 index 000000000000..a626e6cd88a7 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-9173.toml @@ -0,0 +1,5 @@ +type = "added" +message = 'Introduces a new pipeline function "lookup_has_value" to determine if a given key is present in the lookup table.' + +issues = ["9173"] +pulls = ["15143"] diff --git a/changelog/5.1.0-rc.1/issue-9438.toml b/changelog/5.1.0-rc.1/issue-9438.toml new file mode 100644 index 000000000000..deab5af1e7a2 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-9438.toml @@ -0,0 +1,10 @@ +type = "added" +message = "Added ability to replay the search that triggered an Event." + +issues = ["9438"] +pulls = ["13931"] + +details.user = """ +On each expanded Event/Alert description, there is now a Replay Search option that will open a search page showing the +results of the query that triggered the event. +""" diff --git a/changelog/5.1.0-rc.1/issue-9461.toml b/changelog/5.1.0-rc.1/issue-9461.toml new file mode 100644 index 000000000000..68bd308c0d99 --- /dev/null +++ b/changelog/5.1.0-rc.1/issue-9461.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added output_batch_size to the values exposed by the system/configuration API call." + +issues = ["9461"] +pulls = ["14123"] diff --git a/changelog/5.1.0-rc.1/pr-13018.toml b/changelog/5.1.0-rc.1/pr-13018.toml new file mode 100644 index 000000000000..9327ff570e93 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-13018.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add support for configuring Index Set Defaults" + +issues = ["graylog-plugin-enterprise#3264", "graylog-plugin-enterprise#3319"] +pulls = ["13018"] diff --git a/changelog/5.1.0-rc.1/pr-13780.toml b/changelog/5.1.0-rc.1/pr-13780.toml new file mode 100644 index 000000000000..c463e994aefe --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-13780.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Refreshed and modernized look of login dialog." + +pulls = ["13780"] diff --git a/changelog/5.1.0-rc.1/pr-13970.toml b/changelog/5.1.0-rc.1/pr-13970.toml new file mode 100644 index 000000000000..dac8a9afe5a6 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-13970.toml @@ -0,0 +1,4 @@ +type = "add" +message = "Replacing getting started guide with start page which lists recently opened and favorite saved searches and dashboards and recent activity." + +pulls = ["13970"] diff --git a/changelog/5.1.0-rc.1/pr-13985.toml b/changelog/5.1.0-rc.1/pr-13985.toml new file mode 100644 index 000000000000..6a7eb24aa878 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-13985.toml @@ -0,0 +1,4 @@ +type = "add" +message = "Dynamic Startup Page Backend additions for Recent Activity, Pinned Items, Last Opened" + +pulls = ["13985"] diff --git a/changelog/5.1.0-rc.1/pr-14063.toml b/changelog/5.1.0-rc.1/pr-14063.toml new file mode 100644 index 000000000000..fad2602c996c --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14063.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix connectivity check when Elasticsearch/OpenSearch is not available." + +pulls = ["14063"] diff --git a/changelog/5.1.0-rc.1/pr-14067.toml b/changelog/5.1.0-rc.1/pr-14067.toml new file mode 100644 index 000000000000..d4c44c7221ad --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14067.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix potential duplicates in outputs during node restarts." 
+ +pulls = ["14067"] diff --git a/changelog/5.1.0-rc.1/pr-14085.toml b/changelog/5.1.0-rc.1/pr-14085.toml new file mode 100644 index 000000000000..5031765f1aee --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14085.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Add option to sort streams returned by API endpoint to be sorted by index set title." + +pulls = ["14085"] diff --git a/changelog/5.1.0-rc.1/pr-14096.toml b/changelog/5.1.0-rc.1/pr-14096.toml new file mode 100644 index 000000000000..8d3b0180ee39 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14096.toml @@ -0,0 +1,13 @@ +type = "added" +message = "Add REST API endpoint for bulk assignment of streams to index sets." + +pulls = ["14096"] + +details.user = """ +A REST API endpoint has been added supporting a bulk assignment of streams to an index set. This allows e.g. migrating +a number of streams to a new index set. The endpoint is available as: + +PUT /api/streams/indexSet/{index set id} + +with a JSON body of an array of stream ids that should be assigned to this index set. +""" diff --git a/changelog/5.1.0-rc.1/pr-14097.toml b/changelog/5.1.0-rc.1/pr-14097.toml new file mode 100644 index 000000000000..78b379d8152d --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14097.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Prevent RejectedExecutionException during shutdown with a large number of outputs." + +pulls = ["14097"] diff --git a/changelog/5.1.0-rc.1/pr-14105.toml b/changelog/5.1.0-rc.1/pr-14105.toml new file mode 100644 index 000000000000..e82abfc73dc7 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14105.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Display dashboards overview as a table to improve usability." + +pulls = ["14105"] diff --git a/changelog/5.1.0-rc.1/pr-14110.toml b/changelog/5.1.0-rc.1/pr-14110.toml new file mode 100644 index 000000000000..6489f8a0c5ed --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14110.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Removes ability for users to change Event Definition Condition Type if it is not meant to be changeable." + +pulls = ["14110"] diff --git a/changelog/5.1.0-rc.1/pr-14154.toml b/changelog/5.1.0-rc.1/pr-14154.toml new file mode 100644 index 000000000000..19d19f93fc93 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14154.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix uncommittedMessages metric for non-empty journal before first commit." + +pulls = ["14154"] diff --git a/changelog/5.1.0-rc.1/pr-14171.toml b/changelog/5.1.0-rc.1/pr-14171.toml new file mode 100644 index 000000000000..e545f49f3361 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14171.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Triggering field types refetching upon search refresh." + +pulls = ["14171"] diff --git a/changelog/5.1.0-rc.1/pr-14199.toml b/changelog/5.1.0-rc.1/pr-14199.toml new file mode 100644 index 000000000000..f790cc92e49c --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14199.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Reduce the default connection and read timeouts for email sending from 60 seconds to 10 seconds" + +pulls = ["14199"] diff --git a/changelog/5.1.0-rc.1/pr-14223.toml b/changelog/5.1.0-rc.1/pr-14223.toml new file mode 100644 index 000000000000..e0d4472626a5 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14223.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Display saved searches overview as a table to improve usability." 
+ +pulls = ["14223"] diff --git a/changelog/5.1.0-rc.1/pr-14225.toml b/changelog/5.1.0-rc.1/pr-14225.toml new file mode 100644 index 000000000000..c67db558aa66 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14225.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix output shutdown triggering twice when the output is globally deleted." + +pulls = ["14225"] diff --git a/changelog/5.1.0-rc.1/pr-14239.toml b/changelog/5.1.0-rc.1/pr-14239.toml new file mode 100644 index 000000000000..7da96cd119b2 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14239.toml @@ -0,0 +1,17 @@ +type = "added" +message = "Providing plugin API to supply custom query input commands." + +pulls = ["14239"] + +details.user = """ +A new frontend plugin API is provided that allows plugins to contribute custom commands to the query input. It allows plugins to +provide two different entities: + + - a command definition (`views.queryInput.commands`) specifying a usage scope, name, Mac/Windows key bindings as well + as a function that is executed, which gets an `Editor` instance as well as a `CustomCommandContext` object + - a context provider (`views.queryInput.commandContextProviders`) which allows a plugin to extend the `CustomCommandContext` + object which is supplied + +Part of the context is the `usage`, which specifies the scope the query input is currently running in. It can currently be one of +`'search_query' | 'widget_query' | 'global_override_query'`. +""" diff --git a/changelog/5.1.0-rc.1/pr-14240.toml b/changelog/5.1.0-rc.1/pr-14240.toml new file mode 100644 index 000000000000..c0f8a8390b89 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14240.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Forwarder inputs can be used for the 'match input' stream rule." + +issues = ["graylog-plugin-cloud#1066"] +pulls = ["14240", "graylog-plugin-enterprise#4485"] diff --git a/changelog/5.1.0-rc.1/pr-14262.toml b/changelog/5.1.0-rc.1/pr-14262.toml new file mode 100644 index 000000000000..08b7103e627b --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14262.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Sorting streams case-insensitive in API." + +pulls = ["14262"] diff --git a/changelog/5.1.0-rc.1/pr-14269.toml b/changelog/5.1.0-rc.1/pr-14269.toml new file mode 100644 index 000000000000..399db41f356d --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14269.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Added the option to toggle between a linear and a logarithmic axis for area/bar/line/scatter charts." + +pulls = ["14269"] diff --git a/changelog/5.1.0-rc.1/pr-14277.toml b/changelog/5.1.0-rc.1/pr-14277.toml new file mode 100644 index 000000000000..49d85e809695 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14277.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Avoid vanishing grouping direction when dragged." + +pulls = ["14277"] diff --git a/changelog/5.1.0-rc.1/pr-14283.toml b/changelog/5.1.0-rc.1/pr-14283.toml new file mode 100644 index 000000000000..9634be94aab9 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14283.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix non-stop loading indicator issue in authentication backends" + +issues = ["13023"] +pulls = ["14283"] diff --git a/changelog/5.1.0-rc.1/pr-14331.toml b/changelog/5.1.0-rc.1/pr-14331.toml new file mode 100644 index 000000000000..549bf9e8d392 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14331.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Adding inline widget creation in empty slots on grid."
+ +pulls = ["14331"] diff --git a/changelog/5.1.0-rc.1/pr-14354.toml b/changelog/5.1.0-rc.1/pr-14354.toml new file mode 100644 index 000000000000..c62a0ffedeb1 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14354.toml @@ -0,0 +1,4 @@ +type = "s" +message = "Update to freemarker 2.3.31 and use a more secure default configuration." + +pulls = ["14354"] diff --git a/changelog/5.1.0-rc.1/pr-14370.toml b/changelog/5.1.0-rc.1/pr-14370.toml new file mode 100644 index 000000000000..049848ba8aee --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14370.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Remove single-quotes from jvm.memory metric names" + +issues = ["graylog-plugin-enterprise#4394"] +pulls = ["14370"] diff --git a/changelog/5.1.0-rc.1/pr-14409.toml b/changelog/5.1.0-rc.1/pr-14409.toml new file mode 100644 index 000000000000..b6e30f4fea8f --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14409.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Reduce logging and add metric for \"Couldn't remove default stream\" warning." + +issues = ["graylog-plugin-enterprise#3481"] +pulls = ["14409"] diff --git a/changelog/5.1.0-rc.1/pr-14417.toml b/changelog/5.1.0-rc.1/pr-14417.toml new file mode 100644 index 000000000000..ef8883735e55 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14417.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Graylog DataNode. Initial version." + +issues = [] +pulls = ["14417"] diff --git a/changelog/5.1.0-rc.1/pr-14424.toml b/changelog/5.1.0-rc.1/pr-14424.toml new file mode 100644 index 000000000000..295395dacad4 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14424.toml @@ -0,0 +1,24 @@ +type = "added" +message = "Introduces a new index rotation strategy combining features of size-based and time-based strategies." + +issues = ["graylog-plugin-enterprise#4447"] +pulls = ["14424", "graylog-plugin-enterprise#4572"] + +details.user = """ +The new index rotation strategy aims to make it easier to achieve good resource usage, +i.e. shards that are neither too small nor too big, while avoiding an excessive number +of shards. This has been difficult to date, especially when faced with a variable ingest +rate. +""" + +details.ops = """ +`time_size_optimizing` is the new default rotation strategy, as it provides the best results +with a minimum of user input. The default settings are based on ES/OS sharding recommendations and generally +do not need to be changed. +The following configuration file settings are available, if you wish to modify the behavior +for your requirements: +`time_size_optimizing_rotation_min_shard_size=20g`: A shard should be at least this big before rotating. +`time_size_optimizing_rotation_max_shard_size=50g`: Rotate if a shard exceeds this size limit. +`time_size_optimizing_retention_min_lifetime=30d`: The minimum number of days the data in the index is kept before retention is applied. +`time_size_optimizing_retention_max_lifetime=40d`: The maximum number of days the data in the index is kept before retention is applied. +""" diff --git a/changelog/5.1.0-rc.1/pr-14435.toml b/changelog/5.1.0-rc.1/pr-14435.toml new file mode 100644 index 000000000000..410a288829ba --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14435.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added a page for tracking failures in Sidecar."
+ +issues = ["collector-sidecar#433"] +pulls = ["14435"] diff --git a/changelog/5.1.0-rc.1/pr-14437.toml b/changelog/5.1.0-rc.1/pr-14437.toml new file mode 100644 index 000000000000..7249e0b8ea54 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14437.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Expose JVM metrics to prometheus." + +issues = ["graylog-plugin-enterprise#4393"] +pulls = ["14437"] diff --git a/changelog/5.1.0-rc.1/pr-14446.toml b/changelog/5.1.0-rc.1/pr-14446.toml new file mode 100644 index 000000000000..ea173a31ced9 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14446.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Supporting nested and tupled groupings in aggregations." + +pulls = ["14446"] diff --git a/changelog/5.1.0-rc.1/pr-14462.toml b/changelog/5.1.0-rc.1/pr-14462.toml new file mode 100644 index 000000000000..0d33a212dedd --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14462.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix possible HTTP API thread pool exhaustion by processing proxied cluster metrics requests asynchronously." + +pulls = ["14462"] diff --git a/changelog/5.1.0-rc.1/pr-14550.toml b/changelog/5.1.0-rc.1/pr-14550.toml new file mode 100644 index 000000000000..432ef45e8b50 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14550.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Add OpenTelemetry tracing instrumentation to select code paths." + +pulls = ["14550"] diff --git a/changelog/5.1.0-rc.1/pr-14566.toml b/changelog/5.1.0-rc.1/pr-14566.toml new file mode 100644 index 000000000000..36b2c4b002e3 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14566.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add option to bulk delete dashboards in dashboards overview." + +pulls = ["14566"] diff --git a/changelog/5.1.0-rc.1/pr-14567.toml b/changelog/5.1.0-rc.1/pr-14567.toml new file mode 100644 index 000000000000..465e8060828b --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14567.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add option to bulk delete saved searches in saved searches overview." + +pulls = ["14567"] diff --git a/changelog/5.1.0-rc.1/pr-14575.toml b/changelog/5.1.0-rc.1/pr-14575.toml new file mode 100644 index 000000000000..0c96245a0d88 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14575.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixing missing offset handling in Legacy Searches." + +pulls = ["14575"] diff --git a/changelog/5.1.0-rc.1/pr-14591.toml b/changelog/5.1.0-rc.1/pr-14591.toml new file mode 100644 index 000000000000..3ca4e04508da --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14591.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Persist selected sorting, displayed columns and page size in streams overview for each user." + +issues = ["14293"] +pulls = ["14591"] diff --git a/changelog/5.1.0-rc.1/pr-14598.toml b/changelog/5.1.0-rc.1/pr-14598.toml new file mode 100644 index 000000000000..26a09529957c --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14598.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Persist selected sorting, displayed columns and page size in dashboards overview for each user." + +issues = ["14293"] +pulls = ["14598"] diff --git a/changelog/5.1.0-rc.1/pr-14600.toml b/changelog/5.1.0-rc.1/pr-14600.toml new file mode 100644 index 000000000000..fe2c0168d766 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14600.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Persist selected sorting, displayed columns and page size in saved searches overview for each user." 
+ +issues = ["14293"] +pulls = ["14600"] diff --git a/changelog/5.1.0-rc.1/pr-14605.toml b/changelog/5.1.0-rc.1/pr-14605.toml new file mode 100644 index 000000000000..d3b571c852ce --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14605.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Handle deprecated short time zone IDs in job scheduler definitions." + +issues = ["Graylog2/graylog-plugin-enterprise#4311"] +pulls = ["14605"] diff --git a/changelog/5.1.0-rc.1/pr-14616.toml b/changelog/5.1.0-rc.1/pr-14616.toml new file mode 100644 index 000000000000..a19fb5ecab97 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14616.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Remove undesired retention strategies in Cloud" + +issues = ["graylog-plugin-cloud#1081"] +pulls = ["14616"] diff --git a/changelog/5.1.0-rc.1/pr-14654.toml b/changelog/5.1.0-rc.1/pr-14654.toml new file mode 100644 index 000000000000..7566cd1567f3 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14654.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Hide sort/pin icons in data table when non-interactive" + +pulls = ["14654"] diff --git a/changelog/5.1.0-rc.1/pr-14659.toml b/changelog/5.1.0-rc.1/pr-14659.toml new file mode 100644 index 000000000000..75e90474b693 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14659.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Additional `preventLoops` parameter in `clone_message()` function to prevent creation of endless loops." + +issues = ["5759"] +pulls = ["14659"] diff --git a/changelog/5.1.0-rc.1/pr-14670.toml b/changelog/5.1.0-rc.1/pr-14670.toml new file mode 100644 index 000000000000..8be05de13f8e --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14670.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Improved CSV File Adapter error handling." + +issues = ["graylog-plugin-enterprise#3821"] +pulls = ["14670"] diff --git a/changelog/5.1.0-rc.1/pr-14688.toml b/changelog/5.1.0-rc.1/pr-14688.toml new file mode 100644 index 000000000000..c9e8c8d94407 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14688.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Extend browser support back to Chrome 68." + +issues = ["14677"] +pulls = ["14688"] diff --git a/changelog/5.1.0-rc.1/pr-14692.toml b/changelog/5.1.0-rc.1/pr-14692.toml new file mode 100644 index 000000000000..44066b3a2cd0 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14692.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Escape $ properly when part of value that is added to query." + +pulls = ["14692"] diff --git a/changelog/5.1.0-rc.1/pr-14720.toml b/changelog/5.1.0-rc.1/pr-14720.toml new file mode 100644 index 000000000000..785518dd6a56 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14720.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Changing default refresh interval to five seconds." + +pulls = ["14720"] +isses = ["Graylog2/graylog-plugin-enterprise#4567"] diff --git a/changelog/5.1.0-rc.1/pr-14723.toml b/changelog/5.1.0-rc.1/pr-14723.toml new file mode 100644 index 000000000000..f7d5f8e8c638 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14723.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Scheduling next search refresh only after current one completed." + +pulls = ["14723"] diff --git a/changelog/5.1.0-rc.1/pr-14725.toml b/changelog/5.1.0-rc.1/pr-14725.toml new file mode 100644 index 000000000000..fc3504b2306f --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14725.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Unify fields configuration in aggregation builder and messages export modal." 
+ +pulls = ["14725"] diff --git a/changelog/5.1.0-rc.1/pr-14726.toml b/changelog/5.1.0-rc.1/pr-14726.toml new file mode 100644 index 000000000000..0f649fa971ea --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14726.toml @@ -0,0 +1,7 @@ +type = "added" +message = "Report runtime failures for polling Inputs." + +issues = [""] +pulls = ["14726"] + +contributors = [""] diff --git a/changelog/5.1.0-rc.1/pr-14742.toml b/changelog/5.1.0-rc.1/pr-14742.toml new file mode 100644 index 000000000000..a5132d163d0f --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14742.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Allow user-configurable auto-refresh settings & default interval." + +pulls = ["14742"] diff --git a/changelog/5.1.0-rc.1/pr-14749.toml b/changelog/5.1.0-rc.1/pr-14749.toml new file mode 100644 index 000000000000..2902c47b8cbe --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14749.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Enabling the simple search/scripting API." + +pulls = ["14749"] diff --git a/changelog/5.1.0-rc.1/pr-14762.toml b/changelog/5.1.0-rc.1/pr-14762.toml new file mode 100644 index 000000000000..469e32fb30b4 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14762.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Avoid full-page refresh when showing node details from message details." + +pulls = ["14762"] diff --git a/changelog/5.1.0-rc.1/pr-14791.toml b/changelog/5.1.0-rc.1/pr-14791.toml new file mode 100644 index 000000000000..6673183ec12b --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14791.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix parsing of date math expressions in query validation" + +pulls = ["14791"] diff --git a/changelog/5.1.0-rc.1/pr-14805.toml b/changelog/5.1.0-rc.1/pr-14805.toml new file mode 100644 index 000000000000..3ac2fb85c0e2 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14805.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Preventing accidental hiding of columns in reporting widgets." + +pulls = ["14805"] +issues = ["Graylog2/graylog-plugin-enterprise#4761"] diff --git a/changelog/5.1.0-rc.1/pr-14823.toml b/changelog/5.1.0-rc.1/pr-14823.toml new file mode 100644 index 000000000000..cd3120329145 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14823.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Preventing accidental hiding of columns in message list widget for reporting." + +pulls = ["14823"] diff --git a/changelog/5.1.0-rc.1/pr-14884.toml b/changelog/5.1.0-rc.1/pr-14884.toml new file mode 100644 index 000000000000..f6e9b8721d17 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14884.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix `WARNING: sun.reflect.Reflection.getCallerClass is not supported.` on server startup." + +issues = ["#7223","#9802", "#11634"] +pulls = ["14884"] diff --git a/changelog/5.1.0-rc.1/pr-14897.toml b/changelog/5.1.0-rc.1/pr-14897.toml new file mode 100644 index 000000000000..b64da8f841d5 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14897.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add dashboards, searches, messages and events to an open investigation." + +pulls = ["14897"] +issues = ["Graylog2/graylog-plugin-enterprise#4794"] diff --git a/changelog/5.1.0-rc.1/pr-14925.toml b/changelog/5.1.0-rc.1/pr-14925.toml new file mode 100644 index 000000000000..4edc4de41daa --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14925.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix pagination of sidecar admin page when collectors are selected." 
+ +issues = ["14924"] +pulls = ["14925"] diff --git a/changelog/5.1.0-rc.1/pr-14926.toml b/changelog/5.1.0-rc.1/pr-14926.toml new file mode 100644 index 000000000000..e4fe7f761ecc --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14926.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Avoid excessive index creation for AccessToken." + +pulls = ["14926"] +issues = ["Graylog2/graylog-plugin-enterprise#4850"] diff --git a/changelog/5.1.0-rc.1/pr-14938.toml b/changelog/5.1.0-rc.1/pr-14938.toml new file mode 100644 index 000000000000..247409d1a625 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14938.toml @@ -0,0 +1,6 @@ +type = "added" +message = "Add support bundle export feature." + +issues = [""] +pulls = ["14938"] + diff --git a/changelog/5.1.0-rc.1/pr-14941.toml b/changelog/5.1.0-rc.1/pr-14941.toml new file mode 100644 index 000000000000..8fd981070a93 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14941.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Added preflight UI for indexer configuration" + +pull = ["14941"] diff --git a/changelog/5.1.0-rc.1/pr-14953.toml b/changelog/5.1.0-rc.1/pr-14953.toml new file mode 100644 index 000000000000..3cc2b2eef5da --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14953.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Merging Threat Intel Plugin into server repository." + +pulls = ["14953"] diff --git a/changelog/5.1.0-rc.1/pr-14965.toml b/changelog/5.1.0-rc.1/pr-14965.toml new file mode 100644 index 000000000000..e85c6a1a7e2c --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14965.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Make sure all bindings which use createIndex are threadsafe and make them singletons." + +pulls = ["14965"] +issues = ["Graylog2/graylog-plugin-enterprise#4862"] diff --git a/changelog/5.1.0-rc.1/pr-14974.toml b/changelog/5.1.0-rc.1/pr-14974.toml new file mode 100644 index 000000000000..5b74af3769fd --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14974.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix error on missing permission when creating email event notifications. Adds `users:list` permission to role User Inspector" + +pulls = ["14974"] +issues = ["Graylog2/graylog-plugin-enterprise#4886"] diff --git a/changelog/5.1.0-rc.1/pr-14978.toml b/changelog/5.1.0-rc.1/pr-14978.toml new file mode 100644 index 000000000000..a2ae7e1b7349 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14978.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Remove stack trace from the generic server error response." + +pulls = ["14978"] +issues = ["Graylog2/graylog-plugin-enterprise#4891"] diff --git a/changelog/5.1.0-rc.1/pr-14987.toml b/changelog/5.1.0-rc.1/pr-14987.toml new file mode 100644 index 000000000000..624c847b0bd4 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14987.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Sending new header on server response X-Content-Type-Options:nosniff" + +issues = ["Graylog2/graylog-plugin-enterprise#4890"] +pull = ["14987"] diff --git a/changelog/5.1.0-rc.1/pr-14991.toml b/changelog/5.1.0-rc.1/pr-14991.toml new file mode 100644 index 000000000000..d15900adf903 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-14991.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Add a Content Security Policy header to responses." 
+ +pulls = ["14991"] +issues = ["Graylog2/graylog-plugin-enterprise#4887"] diff --git a/changelog/5.1.0-rc.1/pr-15001.toml b/changelog/5.1.0-rc.1/pr-15001.toml new file mode 100644 index 000000000000..f3e47ef8d366 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15001.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added badge to navigation bar to show when a new Illuminate bundle is ready to download and install." + +pulls = ["15001"] +issues = ["Graylog2/graylog-plugin-enterprise#4868"] diff --git a/changelog/5.1.0-rc.1/pr-15016.toml b/changelog/5.1.0-rc.1/pr-15016.toml new file mode 100644 index 000000000000..96fba0f9f282 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15016.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Added support for CIDR lookups in CSV file data adapters" + +pulls = ["15016"] \ No newline at end of file diff --git a/changelog/5.1.0-rc.1/pr-15018.toml b/changelog/5.1.0-rc.1/pr-15018.toml new file mode 100644 index 000000000000..85990a6b6ac5 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15018.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Using database time for node registration and heartbeat" + +pulls = ["15018"] diff --git a/changelog/5.1.0-rc.1/pr-15058.toml b/changelog/5.1.0-rc.1/pr-15058.toml new file mode 100644 index 000000000000..98b59b3927d2 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15058.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fixed CSV File adapter to only verify file read access" + +issues = ["14998"] +pulls = ["15058"] + diff --git a/changelog/5.1.0-rc.1/pr-15070.toml b/changelog/5.1.0-rc.1/pr-15070.toml new file mode 100644 index 000000000000..46ee129133b8 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15070.toml @@ -0,0 +1,4 @@ +type = "c" +message = "datanode: add config option to force single-node type" + +pulls = ["15070"] diff --git a/changelog/5.1.0-rc.1/pr-15141.toml b/changelog/5.1.0-rc.1/pr-15141.toml new file mode 100644 index 000000000000..3eeeb9ee29a7 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15141.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix isFreshInstallation flag in setups with newly registered datanodes" + +pulls = ["15141"] diff --git a/changelog/5.1.0-rc.1/pr-15148.toml b/changelog/5.1.0-rc.1/pr-15148.toml new file mode 100644 index 000000000000..c913b3809f28 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15148.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Adds Add Evidence Modal" + +pulls = ["15148"] +issues = ["Graylog2/graylog-plugin-enterprise#4848"] diff --git a/changelog/5.1.0-rc.1/pr-15184.toml b/changelog/5.1.0-rc.1/pr-15184.toml new file mode 100644 index 000000000000..97079ad31662 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15184.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Set a specific Content Security Policy header for API-Browser responses." + +pulls = ["15184"] +issues = ["Graylog2/graylog-plugin-enterprise#4887"] diff --git a/changelog/5.1.0-rc.1/pr-15186.toml b/changelog/5.1.0-rc.1/pr-15186.toml new file mode 100644 index 000000000000..26f0be31d65d --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15186.toml @@ -0,0 +1,5 @@ +type = "security" +message = "Update Netty from 4.1.84 to 4.1.91" + +pulls = ["15186"] + diff --git a/changelog/5.1.0-rc.1/pr-15187.toml b/changelog/5.1.0-rc.1/pr-15187.toml new file mode 100644 index 000000000000..750da6c041d2 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15187.toml @@ -0,0 +1,4 @@ +type = "s" +message = "Update the Apache Shiro dependency to version 1.11.0.
(the fixed CVEs don't affect Graylog)" + +pulls = ["15187"] diff --git a/changelog/5.1.0-rc.1/pr-15190.toml b/changelog/5.1.0-rc.1/pr-15190.toml new file mode 100644 index 000000000000..24bb090cd16b --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15190.toml @@ -0,0 +1,4 @@ +type = "s" +message = "Update json-path to 2.8.0 to fix a security issue in json-smart." + +pulls = ["15190"] diff --git a/changelog/5.1.0-rc.1/pr-15195.toml b/changelog/5.1.0-rc.1/pr-15195.toml new file mode 100644 index 000000000000..568a5f9248a2 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15195.toml @@ -0,0 +1,12 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Fixed calculation and expression of CPU load in OshiProbe." + +issues = [""] +pulls = ["15195", "15183" ] + +contributors = ["panxt"] + diff --git a/changelog/5.1.0-rc.1/pr-15198.toml b/changelog/5.1.0-rc.1/pr-15198.toml new file mode 100644 index 000000000000..d361b0f6dd8d --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15198.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Update Opensearch distributed with Datanode to 2.5.0" + +pulls = ["15198"] diff --git a/changelog/5.1.0-rc.1/pr-15216.toml b/changelog/5.1.0-rc.1/pr-15216.toml new file mode 100644 index 000000000000..7d4bea62c430 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15216.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Added autodetection of opensearch distribution in datanode." + +pulls = ["15216"] diff --git a/changelog/5.1.0-rc.1/pr-15242.toml b/changelog/5.1.0-rc.1/pr-15242.toml new file mode 100644 index 000000000000..6dbc494e5bb0 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15242.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix OSHI system stats on docker cgroupv2." + +issues = [""] +pulls = ["15242"] + diff --git a/changelog/5.1.0-rc.1/pr-15295.toml b/changelog/5.1.0-rc.1/pr-15295.toml new file mode 100644 index 000000000000..8e7cd0b19370 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15295.toml @@ -0,0 +1,6 @@ +type = "added" +message = "Updating login background and claim." + +issues = ["15215"] +pulls = ["15295"] + diff --git a/changelog/5.1.0-rc.1/pr-15306.toml b/changelog/5.1.0-rc.1/pr-15306.toml new file mode 100644 index 000000000000..6361b7f51f09 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15306.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix query parser's tokenizer state after regex has ended." + +issues = ["Graylog2/graylog-plugin-enterprise#5014"] +pulls = ["15306"] + diff --git a/changelog/5.1.0-rc.1/pr-15329.toml b/changelog/5.1.0-rc.1/pr-15329.toml new file mode 100644 index 000000000000..1b98567ef827 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15329.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Let cookie auth take precedence over basic auth. 
" + +issues = ["6831"] +pulls = ["15329"] + diff --git a/changelog/5.1.0-rc.1/pr-15400.toml b/changelog/5.1.0-rc.1/pr-15400.toml new file mode 100644 index 000000000000..e0327af5885f --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-15400.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixed possible unwanted concurrency in SystemJobManager" + +pulls = ["15400"] diff --git a/changelog/5.1.0-rc.1/pr-6711.toml b/changelog/5.1.0-rc.1/pr-6711.toml new file mode 100644 index 000000000000..61214e587697 --- /dev/null +++ b/changelog/5.1.0-rc.1/pr-6711.toml @@ -0,0 +1,19 @@ +type = "added" +message = "Make messages with identical timestamps sortable by ULID" + +issues = ["2741"] +pulls = ["6711"] + +contributors = [""] + +details.user = """ +If a batch of messages was received with identical timestamps +(the same millisecond), the original receive order is kept by the +encoded sequence number which directly follows the timestamp. +We embedd this into the ULID of the `gl2_message_id` field, +which can be used to sort these messages. + +CAVEAT: +The sequence number is generated per node and input. +This means that sorting will not work if an input is load balanced over multiple nodes. +""" diff --git a/changelog/5.2.0-rc.1/.gitkeep b/changelog/5.2.0-rc.1/.gitkeep new file mode 100644 index 000000000000..900f0cb27b2e --- /dev/null +++ b/changelog/5.2.0-rc.1/.gitkeep @@ -0,0 +1 @@ +# Keep the directory in Git \ No newline at end of file diff --git a/changelog/5.2.0-rc.1/ghsa-2q4p-f6gf-mqr5.toml b/changelog/5.2.0-rc.1/ghsa-2q4p-f6gf-mqr5.toml new file mode 100644 index 000000000000..c5e0e9f0dde1 --- /dev/null +++ b/changelog/5.2.0-rc.1/ghsa-2q4p-f6gf-mqr5.toml @@ -0,0 +1,2 @@ +type = "security" +message = "Fix partial path traversal vulnerability in Support Bundle feature. [GHSA-2q4p-f6gf-mqr5](https://github.com/Graylog2/graylog2-server/security/advisories/GHSA-2q4p-f6gf-mqr5)" diff --git a/changelog/5.2.0-rc.1/ghsa-3fqm-frhg-7c85.toml b/changelog/5.2.0-rc.1/ghsa-3fqm-frhg-7c85.toml new file mode 100644 index 000000000000..d39da8b7ea59 --- /dev/null +++ b/changelog/5.2.0-rc.1/ghsa-3fqm-frhg-7c85.toml @@ -0,0 +1,2 @@ +type = "security" +message = "Fix stale session cache after logout. [GHSA-3fqm-frhg-7c85](https://github.com/Graylog2/graylog2-server/security/advisories/GHSA-3fqm-frhg-7c85)" diff --git a/changelog/5.2.0-rc.1/ghsa-g96c-x7rh-99r3.toml b/changelog/5.2.0-rc.1/ghsa-g96c-x7rh-99r3.toml new file mode 100644 index 000000000000..e378ea968e01 --- /dev/null +++ b/changelog/5.2.0-rc.1/ghsa-g96c-x7rh-99r3.toml @@ -0,0 +1,2 @@ +type = "security" +message = "Fix insecure source port usage for DNS Lookup adapter queries. [GHSA-g96c-x7rh-99r3](https://github.com/Graylog2/graylog2-server/security/advisories/GHSA-g96c-x7rh-99r3)" diff --git a/changelog/5.2.0-rc.1/graylog-project-internal-pr-102.toml b/changelog/5.2.0-rc.1/graylog-project-internal-pr-102.toml new file mode 100644 index 000000000000..9a7bc360ef47 --- /dev/null +++ b/changelog/5.2.0-rc.1/graylog-project-internal-pr-102.toml @@ -0,0 +1,6 @@ +type = "r" +message = "Remove support for legacy Collector Sidecars." + +issues = [""] +pulls = ["Graylog2/graylog-project-internal#102"] + diff --git a/changelog/5.2.0-rc.1/issue-10682.toml b/changelog/5.2.0-rc.1/issue-10682.toml new file mode 100644 index 000000000000..6c24fd4a0408 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-10682.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Display indicator when dashboard has unsaved changes." 
+ +issues = ["10682"] +pulls = ["16122"] diff --git a/changelog/5.2.0-rc.1/issue-11401.toml b/changelog/5.2.0-rc.1/issue-11401.toml new file mode 100644 index 000000000000..fa232fd8a884 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-11401.toml @@ -0,0 +1,6 @@ +type = "c" +message = "Display keyword in time range summary instead of from / to range. Display effective search execution time range in sidebar, which can be helpful for relative and keyword time ranges." + +issues = ["11401"] +pulls = ["15961"] + diff --git a/changelog/5.2.0-rc.1/issue-11465.toml b/changelog/5.2.0-rc.1/issue-11465.toml new file mode 100644 index 000000000000..c62fa086b75b --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-11465.toml @@ -0,0 +1,6 @@ +type = "c" +message = "Memorize state of time range tabs in time range picker while picker is open." + +issues = ["11465"] +pulls = ["16107"] + diff --git a/changelog/5.2.0-rc.1/issue-11592.toml b/changelog/5.2.0-rc.1/issue-11592.toml new file mode 100644 index 000000000000..a6c64daa63a7 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-11592.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Validate in aggregation builder if metric name is unique." + +issues = ["11592"] +pulls = ["16097"] + diff --git a/changelog/5.2.0-rc.1/issue-11868.toml b/changelog/5.2.0-rc.1/issue-11868.toml new file mode 100644 index 000000000000..02eaf3c6f5b0 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-11868.toml @@ -0,0 +1,10 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Fix join() and substring() pipeline function parameter names." + +issues = ["11868"] +pulls = [""] + diff --git a/changelog/5.2.0-rc.1/issue-12165.toml b/changelog/5.2.0-rc.1/issue-12165.toml new file mode 100644 index 000000000000..5a8d6cc1e1a2 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-12165.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Do not show query input suggestions when using `Add to query` field value action." + +issues = ["12165"] +pulls = ["15885"] diff --git a/changelog/5.2.0-rc.1/issue-12498.toml b/changelog/5.2.0-rc.1/issue-12498.toml new file mode 100644 index 000000000000..57a09018999d --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-12498.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "We improved the index set overview page performance and added a title-based search feature." 
+ +issues = ["12498"] +pulls = ["15155"] diff --git a/changelog/5.2.0-rc.1/issue-13975.toml b/changelog/5.2.0-rc.1/issue-13975.toml new file mode 100644 index 000000000000..3ec839893b5c --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-13975.toml @@ -0,0 +1,5 @@ +type = "a" +message = "The max number of indices is now persisted when changing strategies in the index retention strategy configuration" + +issues = ["13975"] +pulls = ["16860", "graylog-plugin-enterprise#5941"] diff --git a/changelog/5.2.0-rc.1/issue-14373.toml b/changelog/5.2.0-rc.1/issue-14373.toml new file mode 100644 index 000000000000..cbe6a5816aeb --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-14373.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix test lookup not accepting uppercase characters" + +issues = ["14373"] +pulls = ["16909"] diff --git a/changelog/5.2.0-rc.1/issue-14383.toml b/changelog/5.2.0-rc.1/issue-14383.toml new file mode 100644 index 000000000000..bca90ed6b1d9 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-14383.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add log messages like in the old JEST client to show nodes arriving/dropping from an ES/OS cluster" + +issues = ["14383"] +pulls = ["14898"] diff --git a/changelog/5.2.0-rc.1/issue-14661.toml b/changelog/5.2.0-rc.1/issue-14661.toml new file mode 100644 index 000000000000..143d47be1900 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-14661.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Make sure that `boolean` fields are returned as `true`|`false` in aggregations on Opensearch 2.4+." + +issues = ["14661"] +pulls = ["15809"] diff --git a/changelog/5.2.0-rc.1/issue-14793.toml b/changelog/5.2.0-rc.1/issue-14793.toml new file mode 100644 index 000000000000..2e623b37519a --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-14793.toml @@ -0,0 +1,11 @@ +type = "fixed" +message = "Adjust keybindings for query input autocompletion." + +issues = ["14793"] +pulls = ["16162"] + +details.user = """ +With #6909 we received the feedback that it would be useful to only press `Tab` once to insert the first autocomplete suggestion (instead of pressing it twice). As result we implemented a change to automatically focus the first suggestion. This led to a problem in a different use case. When you search for a custom value like ssh login the autocomplete suggested field names for login and pressing Return did not execute the search but inserted the first suggestion. +We are now no longer focusing the first suggestion and adjusting the behaviour for Tab key. When pressing Tab while no suggestion is focused we select and insert the first entry. +This way it is possible to press Return when searching for a custom value and it still requires only one press to insert the first suggestion. +""" diff --git a/changelog/5.2.0-rc.1/issue-15035.toml b/changelog/5.2.0-rc.1/issue-15035.toml new file mode 100644 index 000000000000..acd06f43989c --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15035.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix heatmap colorbar font color for dark mode." + +issues = ["15035"] +pulls = ["16129"] diff --git a/changelog/5.2.0-rc.1/issue-15084.toml b/changelog/5.2.0-rc.1/issue-15084.toml new file mode 100644 index 000000000000..e9f7b23118e9 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15084.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Improve readability of hover legend in aggregations." 
+ +issues = ["15084"] +pulls = ["15888"] diff --git a/changelog/5.2.0-rc.1/issue-15095.toml b/changelog/5.2.0-rc.1/issue-15095.toml new file mode 100644 index 000000000000..c08bab152130 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15095.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Enable/Disable EventDefinition status when clicking on status badge." + +issues = ["15095"] +pulls = ["16203"] + diff --git a/changelog/5.2.0-rc.1/issue-15259.toml b/changelog/5.2.0-rc.1/issue-15259.toml new file mode 100644 index 000000000000..d5206b08e3f3 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15259.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix: BulkActions dialog not closing after clicking on Confirm." + +issues = ["15259"] +pulls = ["15384"] \ No newline at end of file diff --git a/changelog/5.2.0-rc.1/issue-15273.toml b/changelog/5.2.0-rc.1/issue-15273.toml new file mode 100644 index 000000000000..f27b1e974d96 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15273.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix pagination problems on Events page." + +issues = ["15273"] +pulls = ["15357"] diff --git a/changelog/5.2.0-rc.1/issue-15355.toml b/changelog/5.2.0-rc.1/issue-15355.toml new file mode 100644 index 000000000000..5770e5bf7375 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15355.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Implement share button for saved searches overview." + +issues = ["15355"] +pulls = ["16126"] diff --git a/changelog/5.2.0-rc.1/issue-15370.toml b/changelog/5.2.0-rc.1/issue-15370.toml new file mode 100644 index 000000000000..afdf1a7cada2 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15370.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Read CSP directives from a resource property file." + +issues = ["15370"] +pulls = ["15439"] diff --git a/changelog/5.2.0-rc.1/issue-15426.toml b/changelog/5.2.0-rc.1/issue-15426.toml new file mode 100644 index 000000000000..aeae2f528825 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15426.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Create time range quick access list for multiple time range types" + +pulls = ["15484"] diff --git a/changelog/5.2.0-rc.1/issue-15472.toml b/changelog/5.2.0-rc.1/issue-15472.toml new file mode 100644 index 000000000000..9629cf805178 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15472.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix problem with search bar submit button incorrectly indicating changes for keyword time range." + +issues = ["15472"] +pulls = ["15938"] diff --git a/changelog/5.2.0-rc.1/issue-15509.toml b/changelog/5.2.0-rc.1/issue-15509.toml new file mode 100644 index 000000000000..e79ac0eab254 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15509.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Apply pivot limits correctly when installing saved searches/dashboard from pre-5.1 content packs." + +issues = ["15509"] +pulls = ["15523"] diff --git a/changelog/5.2.0-rc.1/issue-15519.toml b/changelog/5.2.0-rc.1/issue-15519.toml new file mode 100644 index 000000000000..265a96114998 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15519.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Make sure that search is reexecuted when 'Add to query'/'Remove from query' on dashboards." 
+ +issues = ["15519"] +pulls = ["15593"] diff --git a/changelog/5.2.0-rc.1/issue-15536.toml b/changelog/5.2.0-rc.1/issue-15536.toml new file mode 100644 index 000000000000..8fb72e828ac9 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15536.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Properly prefix frontend paths to prevent error when custom app path suffix is used." + +issues = ["15536"] +pulls = ["15540"] + diff --git a/changelog/5.2.0-rc.1/issue-15547.toml b/changelog/5.2.0-rc.1/issue-15547.toml new file mode 100644 index 000000000000..b041a5474606 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15547.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Use search query from URL params for dashboard page search input." + +issues = ["15536"] +pulls = ["15540"] + diff --git a/changelog/5.2.0-rc.1/issue-15570.toml b/changelog/5.2.0-rc.1/issue-15570.toml new file mode 100644 index 000000000000..6c62ed531807 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15570.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add sidecar graylog_host variable, Auditbeat collector for Linux and default configurations." + +issues = ["15570"] +pulls = ["15571"] diff --git a/changelog/5.2.0-rc.1/issue-15609.toml b/changelog/5.2.0-rc.1/issue-15609.toml new file mode 100644 index 000000000000..3994f749cd1b --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15609.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "When modifying a user's session timeout, the system will automatically update existing user sessions to ensure application of the new timeout settings." + +issues = ["15609"] +pulls = ["15657"] + diff --git a/changelog/5.2.0-rc.1/issue-15638.toml b/changelog/5.2.0-rc.1/issue-15638.toml new file mode 100644 index 000000000000..8664189751e4 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15638.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Remove LastOpened and Favorite items from internal Entity List" + +issues = ["15638", "15574"] +pulls = ["15658"] + diff --git a/changelog/5.2.0-rc.1/issue-15642.toml b/changelog/5.2.0-rc.1/issue-15642.toml new file mode 100644 index 000000000000..9d769caff09f --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15642.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Save modified search when query is changed through URL parameters." + +issues = ["15642"] +pulls = ["15653"] + diff --git a/changelog/5.2.0-rc.1/issue-15652.toml b/changelog/5.2.0-rc.1/issue-15652.toml new file mode 100644 index 000000000000..5e178d4dafe8 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15652.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Restore mail attribute for AD authentication." + +issues = ["15536"] +pulls = [""] + diff --git a/changelog/5.2.0-rc.1/issue-15678.toml b/changelog/5.2.0-rc.1/issue-15678.toml new file mode 100644 index 000000000000..4e9d4150caac --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15678.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Include link to replay an alert in alert mail notification template." + +issues = ["15678"] +pulls = ["15922"] diff --git a/changelog/5.2.0-rc.1/issue-15707.toml b/changelog/5.2.0-rc.1/issue-15707.toml new file mode 100644 index 000000000000..156123e9c98a --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15707.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixes sorting with variance/stddev/sumofsquare metrics." 
+ +issues = ["15707"] +pulls = ["15878"] diff --git a/changelog/5.2.0-rc.1/issue-15713.toml b/changelog/5.2.0-rc.1/issue-15713.toml new file mode 100644 index 000000000000..e4a9cbed794a --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15713.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Create undo/redo actions in search" + +pulls = ["15704"] diff --git a/changelog/5.2.0-rc.1/issue-15718.toml b/changelog/5.2.0-rc.1/issue-15718.toml new file mode 100644 index 000000000000..f1c737a362a4 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15718.toml @@ -0,0 +1,5 @@ +type = "f" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixing problem with focus loss in query input autocompletion." + +issues = ["15718"] +pulls = ["15933"] diff --git a/changelog/5.2.0-rc.1/issue-15751.toml b/changelog/5.2.0-rc.1/issue-15751.toml new file mode 100644 index 000000000000..524936ff616a --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15751.toml @@ -0,0 +1,6 @@ +type = "A" +message = "Track when an event definition was last successfully matched." + +issues = ["15751"] +pulls = ["16071"] + diff --git a/changelog/5.2.0-rc.1/issue-15778.toml b/changelog/5.2.0-rc.1/issue-15778.toml new file mode 100644 index 000000000000..daa12e0a8d60 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15778.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Render values consistently between normal and full screen mode." + +issues = ["15778"] +pulls = ["15864"] + diff --git a/changelog/5.2.0-rc.1/issue-15841.toml b/changelog/5.2.0-rc.1/issue-15841.toml new file mode 100644 index 000000000000..cf66d29861a7 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15841.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixes problems with editing Search Filters of entities installed via Content Packs. Search Filters of those types of entities are not inlined during Content Pack installation and can be edited." + +issues = ["15841"] +pulls = ["16085","Graylog2/graylog-plugin-enterprise#5560"] diff --git a/changelog/5.2.0-rc.1/issue-15913.toml b/changelog/5.2.0-rc.1/issue-15913.toml new file mode 100644 index 000000000000..3758319eb3b8 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15913.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Disable offline detection for REST API client." + +issues = ["15913"] +pulls = ["15914"] + diff --git a/changelog/5.2.0-rc.1/issue-15916.toml b/changelog/5.2.0-rc.1/issue-15916.toml new file mode 100644 index 000000000000..c759d4da7029 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15916.toml @@ -0,0 +1,6 @@ +type = "changed" +message = "On View Event Definition page added Duplicate button and removed Edit button for definitions that are scoped as immutable." + +issues = ["15916"] +pulls = ["16515"] + diff --git a/changelog/5.2.0-rc.1/issue-15958.toml b/changelog/5.2.0-rc.1/issue-15958.toml new file mode 100644 index 000000000000..14cb702b89ff --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15958.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Show errors again when loading a saved search or dashboard which exceeds the query limit." + +issues = ["15958"] +pulls = ["15998", "16033"] + diff --git a/changelog/5.2.0-rc.1/issue-15975.toml b/changelog/5.2.0-rc.1/issue-15975.toml new file mode 100644 index 000000000000..dd29e4b647cb --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15975.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add bulk action for streams overview to start a search with selected streams." 
+ +issues = ["15975"] +pulls = ["16015"] diff --git a/changelog/5.2.0-rc.1/issue-15990.toml b/changelog/5.2.0-rc.1/issue-15990.toml new file mode 100644 index 000000000000..33349d3e4289 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-15990.toml @@ -0,0 +1,6 @@ +type = "added" +message = "Secure datanode REST API with the same SSL certificate as the one used for Opensearch HTTP" + +issues = ["15990"] +pulls = ["16058"] + diff --git a/changelog/5.2.0-rc.1/issue-16076.toml b/changelog/5.2.0-rc.1/issue-16076.toml new file mode 100644 index 000000000000..4651f5857871 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16076.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix TLS compatibility with older operating systems. (e.g. Win Server 2019)." + +issues = ["16076"] +pulls = [""] + diff --git a/changelog/5.2.0-rc.1/issue-16143.toml b/changelog/5.2.0-rc.1/issue-16143.toml new file mode 100644 index 000000000000..97a4329820a4 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16143.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Cluster initial manager nodes in datanode configurable." + +issues = ["16143"] +pulls = ["16146"] + diff --git a/changelog/5.2.0-rc.1/issue-16168.toml b/changelog/5.2.0-rc.1/issue-16168.toml new file mode 100644 index 000000000000..69da5c1ee86a --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16168.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Improve logic which controls when we show the query input completions." + +issues = ["16207"] +pulls = ["16168"] + diff --git a/changelog/5.2.0-rc.1/issue-16392.toml b/changelog/5.2.0-rc.1/issue-16392.toml new file mode 100644 index 000000000000..6bb312d4461e --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16392.toml @@ -0,0 +1,5 @@ +type = "f" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fixes validation of max index retention lifetime for time-size-optimizing indices." + +issues = ["Graylog2/graylog-plugin-enterprise#5685"] +pulls = ["16392"] diff --git a/changelog/5.2.0-rc.1/issue-16440.toml b/changelog/5.2.0-rc.1/issue-16440.toml new file mode 100644 index 000000000000..e743aa8f80b4 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16440.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Updating title of dashboard is now properly saved." + +issues = ["16440"] +pulls = ["16441"] diff --git a/changelog/5.2.0-rc.1/issue-16447.toml b/changelog/5.2.0-rc.1/issue-16447.toml new file mode 100644 index 000000000000..778be16fb89c --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16447.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Enable basic auth for datanode rest API if security is enabled" + +issues = ["16447"] +pulls = ["16446"] diff --git a/changelog/5.2.0-rc.1/issue-16474.toml b/changelog/5.2.0-rc.1/issue-16474.toml new file mode 100644 index 000000000000..e99d6d58167d --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16474.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixing broken streams page if streams without `matching_type` exist." + +issues = ["16474"] +pulls = ["16522"] diff --git a/changelog/5.2.0-rc.1/issue-16634.toml b/changelog/5.2.0-rc.1/issue-16634.toml new file mode 100644 index 000000000000..7ac88745f8d0 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16634.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixing error which occurred when saving a dashboard after deleting a page in the pages configuration modal." 
+ +issues = ["16634"] +pulls = ["16674"] diff --git a/changelog/5.2.0-rc.1/issue-16828.toml b/changelog/5.2.0-rc.1/issue-16828.toml new file mode 100644 index 000000000000..6d5cc419bc01 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16828.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Geo-Location Processor Configuration modal being stuck after the updating the config failed." + +issues = ["16828"] +pulls = ["16926"] diff --git a/changelog/5.2.0-rc.1/issue-16893.toml b/changelog/5.2.0-rc.1/issue-16893.toml new file mode 100644 index 000000000000..4c849461e253 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16893.toml @@ -0,0 +1,10 @@ +type = "c" +message = "Unify auto-refresh behaviour in case there are form changes." + +issues = ["Graylog2/graylog-plugin-enterprise#5896"] +pulls = ["16893"] + +details.user = """ +We are now submiting the search, when starting the auto-refresh and there are not submitted changes. +We also unified the behaviour by stoping the auto-refresh, when there are changes, while the auto-refresh is active. +""" diff --git a/changelog/5.2.0-rc.1/issue-16913.toml b/changelog/5.2.0-rc.1/issue-16913.toml new file mode 100644 index 000000000000..cb150346e667 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-16913.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed error where only most recent revision of a content pack could be installed." + +issues = ["16913"] +pulls = ["16943"] diff --git a/changelog/5.2.0-rc.1/issue-5404.toml b/changelog/5.2.0-rc.1/issue-5404.toml new file mode 100644 index 000000000000..cc71a8e4aae6 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-5404.toml @@ -0,0 +1,5 @@ +type = "add" +message = "New Content Stream section to welcome page to show Rss feed and Release notice." + +issues = ["Graylog2/graylog-plugin-enterprise#5404"] +pulls = ["15843"] diff --git a/changelog/5.2.0-rc.1/issue-5793.toml b/changelog/5.2.0-rc.1/issue-5793.toml new file mode 100644 index 000000000000..71c5c852af94 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-5793.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixes search export, so that it takes search filters into consideration." + +issues = ["5793"] +pulls = ["16896"] diff --git a/changelog/5.2.0-rc.1/issue-6470.toml b/changelog/5.2.0-rc.1/issue-6470.toml new file mode 100644 index 000000000000..af5a81f47b2f --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-6470.toml @@ -0,0 +1,5 @@ +type = "a" +message = "New pipeline rule functions for map manipulation: map_set, map_remove." + +issues = ["6470"] +pulls = ["16174"] diff --git a/changelog/5.2.0-rc.1/issue-8128.toml b/changelog/5.2.0-rc.1/issue-8128.toml new file mode 100644 index 000000000000..4688de8550fe --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-8128.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Optional flag to invert condition of `remove_field` function." + +issues = ["8128"] +pulls = ["16182"] diff --git a/changelog/5.2.0-rc.1/issue-8633.toml b/changelog/5.2.0-rc.1/issue-8633.toml new file mode 100644 index 000000000000..e52fff3791d1 --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-8633.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix wrong lookup cache metrics after reconfiguration." + +issues = ["8633"] +pulls = [""] + diff --git a/changelog/5.2.0-rc.1/issue-9917.toml b/changelog/5.2.0-rc.1/issue-9917.toml new file mode 100644 index 000000000000..f77eef42d0eb --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-9917.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix GELF output to not block output processing." 
+ +issues = ["9917", "graylog-plugin-enterprise#4623"] +pulls = ["15385"] diff --git a/changelog/5.2.0-rc.1/issue-integrations-1341.toml b/changelog/5.2.0-rc.1/issue-integrations-1341.toml new file mode 100644 index 000000000000..e464d14da45f --- /dev/null +++ b/changelog/5.2.0-rc.1/issue-integrations-1341.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added event definition title as a summary to Microsoft Teams notifications." + +issues = ["Graylog2/graylog-plugin-integrations#1341"] +pulls = ["Graylog2/graylog-plugin-integrations#1343"] diff --git a/changelog/5.2.0-rc.1/pr-14712.toml b/changelog/5.2.0-rc.1/pr-14712.toml new file mode 100644 index 000000000000..46582fa064a7 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-14712.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Updated AWS Java SDK to the latest version" + +issues = ["graylog-plugin-enterprise-integrations#971"] +pulls = ["14712"] diff --git a/changelog/5.2.0-rc.1/pr-15088.toml b/changelog/5.2.0-rc.1/pr-15088.toml new file mode 100644 index 000000000000..65dfbb5fbda9 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15088.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Run matrix tests with a given indexer version." + +pulls = ["15088"] diff --git a/changelog/5.2.0-rc.1/pr-15197.toml b/changelog/5.2.0-rc.1/pr-15197.toml new file mode 100644 index 000000000000..2f6cdece81ab --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15197.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Datanode autodiscovery, indexer hosts refactoring, preflight startup logic implemented" + +pulls = ["15197", "graylog-plugin-enterprise#5101"] + diff --git a/changelog/5.2.0-rc.1/pr-15222.toml b/changelog/5.2.0-rc.1/pr-15222.toml new file mode 100644 index 000000000000..cacfc1a5e187 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15222.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Create message in system notification stream when sidecar becomes inactive" + +pulls = ["15222"] +issues = ["6697"] diff --git a/changelog/5.2.0-rc.1/pr-15280.toml b/changelog/5.2.0-rc.1/pr-15280.toml new file mode 100644 index 000000000000..c9c027fc68bb --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15280.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Show all streams in global decorators configuration, fixing display problem" + +issues = ["13465", "13467"] +pulls = ["15280"] diff --git a/changelog/5.2.0-rc.1/pr-15284.toml b/changelog/5.2.0-rc.1/pr-15284.toml new file mode 100644 index 000000000000..8bbc39952c16 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15284.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Improving decorator config by showing map of all decorators present." + +issues = ["14546"] +pulls = ["15284"] + diff --git a/changelog/5.2.0-rc.1/pr-15305.toml b/changelog/5.2.0-rc.1/pr-15305.toml new file mode 100644 index 000000000000..d74b813f69a8 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15305.toml @@ -0,0 +1,6 @@ +type = "c" +message = "Update Content Security Policy on the fly, when an authentication backend is created." + +issues = ["15371"] +pulls = ["15305"] + diff --git a/changelog/5.2.0-rc.1/pr-15318.toml b/changelog/5.2.0-rc.1/pr-15318.toml new file mode 100644 index 000000000000..6647431e508a --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15318.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Moved AWS plugin into server repository." 
+ +pulls = ["15318"] diff --git a/changelog/5.2.0-rc.1/pr-15378.toml b/changelog/5.2.0-rc.1/pr-15378.toml new file mode 100644 index 000000000000..01ad45fe471d --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15378.toml @@ -0,0 +1,6 @@ +type = "change" +message = "'Show received messages' for forwarder inputs now queries for messages using the new `gl2_forwarder_input` field." + +issues = ["Graylog2/graylog-plugin-enterprise#4586"] +pulls = ["15378"] + diff --git a/changelog/5.2.0-rc.1/pr-15380.toml b/changelog/5.2.0-rc.1/pr-15380.toml new file mode 100644 index 000000000000..6d8a4909841f --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15380.toml @@ -0,0 +1,6 @@ +type = "a" +message = "Add Sidecar Reader and Sidecar Manager roles to read and manage sidecars." + +issues = ["12044"] +pulls = ["15380"] + diff --git a/changelog/5.2.0-rc.1/pr-15381.toml b/changelog/5.2.0-rc.1/pr-15381.toml new file mode 100644 index 000000000000..c5157a3a0b99 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15381.toml @@ -0,0 +1,6 @@ +type = "added" +message = "Add system logs to nodes overview page." + +issues = [""] +pulls = ["15381"] + diff --git a/changelog/5.2.0-rc.1/pr-15401.toml b/changelog/5.2.0-rc.1/pr-15401.toml new file mode 100644 index 000000000000..f3db532e23f3 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15401.toml @@ -0,0 +1,5 @@ +type = "a" +message = "AMQP input improvements: retry connection with configurable interval and encrypt password." + +issues = ["12447"] +pulls = ["15401"] diff --git a/changelog/5.2.0-rc.1/pr-15409.toml b/changelog/5.2.0-rc.1/pr-15409.toml new file mode 100644 index 000000000000..0cdc4c612cf7 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15409.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Adding specific field types for streams, node & input fields." + +pulls = ["15409"] + diff --git a/changelog/5.2.0-rc.1/pr-15421.toml b/changelog/5.2.0-rc.1/pr-15421.toml new file mode 100644 index 000000000000..4e40c27234aa --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15421.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Advanced field types for scripting api - support for stream, node and input name resolution" + +pulls = ["15421"] + diff --git a/changelog/5.2.0-rc.1/pr-15437.toml b/changelog/5.2.0-rc.1/pr-15437.toml new file mode 100644 index 000000000000..3e24bd7fb583 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15437.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Make sure that search creation has finished before executing it for new saved searches/dashboards." + +issues = ["15435"] +pulls = ["15437"] + diff --git a/changelog/5.2.0-rc.1/pr-15464.toml b/changelog/5.2.0-rc.1/pr-15464.toml new file mode 100644 index 000000000000..7257a1ccb0e6 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15464.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Resolving input/streams/node ids in aggregation results, showing titles." + +pulls = ["15464"] + diff --git a/changelog/5.2.0-rc.1/pr-15465.toml b/changelog/5.2.0-rc.1/pr-15465.toml new file mode 100644 index 000000000000..993b3c1e2186 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15465.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fixing session expiration on the nodes page." 
+ +issues = ["Graylog2/graylog-plugin-enterprise#5158"] +pulls = ["15465"] + diff --git a/changelog/5.2.0-rc.1/pr-15470.toml b/changelog/5.2.0-rc.1/pr-15470.toml new file mode 100644 index 000000000000..b109d9f3bf8b --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15470.toml @@ -0,0 +1,7 @@ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "added" +message = "Pipeline Rule Builder UI" + +issues = ["15490"] +pulls = ["15470"] + diff --git a/changelog/5.2.0-rc.1/pr-15480.toml b/changelog/5.2.0-rc.1/pr-15480.toml new file mode 100644 index 000000000000..01b13bf5d722 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15480.toml @@ -0,0 +1,7 @@ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "added" +message = "Allow simulation of json messages on the rule creation page" + +issues = ["15490"] +pulls = ["15480"] + diff --git a/changelog/5.2.0-rc.1/pr-15488.toml b/changelog/5.2.0-rc.1/pr-15488.toml new file mode 100644 index 000000000000..e109fdf7f350 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15488.toml @@ -0,0 +1,7 @@ +type = "c" +message = "Update Kafka client library from 2.7.0 to 3.4.0." + +issues = [""] +pulls = ["15488"] + +contributors = [""] diff --git a/changelog/5.2.0-rc.1/pr-15503.toml b/changelog/5.2.0-rc.1/pr-15503.toml new file mode 100644 index 000000000000..84bf752763aa --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15503.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix archiving failure in case of AWS Opensearch 429 error" + +issues = ["Graylog2/graylog-plugin-enterprise#5136"] +pulls = ["15503"] + diff --git a/changelog/5.2.0-rc.1/pr-15504.toml b/changelog/5.2.0-rc.1/pr-15504.toml new file mode 100644 index 000000000000..aa597708f617 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15504.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Extend authentication cookie's max age on every session extension." + +issues = ["15502"] +pulls = ["15504"] + diff --git a/changelog/5.2.0-rc.1/pr-15505.toml b/changelog/5.2.0-rc.1/pr-15505.toml new file mode 100644 index 000000000000..15a430d15b93 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15505.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Do not extend session when fetching indexer cluster health." + +pulls = ["15505"] + diff --git a/changelog/5.2.0-rc.1/pr-15539.toml b/changelog/5.2.0-rc.1/pr-15539.toml new file mode 100644 index 000000000000..61c97af8ccb6 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15539.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Keep query order when exporting saved searches/dashboards to content packs." + +pulls = ["15505"] +issues = ["15341", "15528"] diff --git a/changelog/5.2.0-rc.1/pr-15545.toml b/changelog/5.2.0-rc.1/pr-15545.toml new file mode 100644 index 000000000000..dea5ece25691 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15545.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Improve widget placement for specific widget sizes/positions." + +pulls = ["15545"] +issues = ["15308", "15538"] diff --git a/changelog/5.2.0-rc.1/pr-15546.toml b/changelog/5.2.0-rc.1/pr-15546.toml new file mode 100644 index 000000000000..6285b68faa0f --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15546.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add CA configuration to load a given CA during startup for datanode certificate provisioning." 
+ +pulls = ["15546"] + diff --git a/changelog/5.2.0-rc.1/pr-15556.toml b/changelog/5.2.0-rc.1/pr-15556.toml new file mode 100644 index 000000000000..fb0f2e27d5e0 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15556.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Restrict time size based rotation strategy duration leeway on cloud" + +issues = ["Graylog2/graylog-plugin-cloud#1149"] +pulls = ["15556", "Graylog2/graylog-plugin-enterprise#5213"] diff --git a/changelog/5.2.0-rc.1/pr-15565.toml b/changelog/5.2.0-rc.1/pr-15565.toml new file mode 100644 index 000000000000..bd22b8017169 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15565.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "NodeService#allActive returns only nodes of correct type based on current server type" + +pulls = ["15505"] + diff --git a/changelog/5.2.0-rc.1/pr-15601.toml b/changelog/5.2.0-rc.1/pr-15601.toml new file mode 100644 index 000000000000..8cad64ef21b1 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15601.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Implementing highlighting for advanced field types (streams, nodes, inputs)." + +pulls = ["15601"] diff --git a/changelog/5.2.0-rc.1/pr-15613.toml b/changelog/5.2.0-rc.1/pr-15613.toml new file mode 100644 index 000000000000..32f618524149 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15613.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Avoid redirecting to path with trailing `/` when delivering static assets." + +pulls = ["15613"] diff --git a/changelog/5.2.0-rc.1/pr-15614.toml b/changelog/5.2.0-rc.1/pr-15614.toml new file mode 100644 index 000000000000..d0f774af8db7 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15614.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Include titles of advanced field types in suggestions." + +pulls = ["15614"] + diff --git a/changelog/5.2.0-rc.1/pr-15677.toml b/changelog/5.2.0-rc.1/pr-15677.toml new file mode 100644 index 000000000000..ab7ddfea31b6 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15677.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Updating Sources Dashboard with better widgets to gain insights into log volume distribution." + +pulls = ["15677"] + diff --git a/changelog/5.2.0-rc.1/pr-15682.toml b/changelog/5.2.0-rc.1/pr-15682.toml new file mode 100644 index 000000000000..3fe0876d4f90 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15682.toml @@ -0,0 +1,20 @@ +type = "a" +message = "Added the string_entropy pipeline function." + +issues = ["graylog-plugin-enterprise#4839"] +pulls = ["15682"] + +details.user = """ +The `string_entropy` pipeline function is now avaialble and can be used to compute Shannon Entropy for input strings. + +Example usage: + +``` +rule "string_entropy" +when + true +then + set_field("entropy_value", string_entropy(to_string($message.my_field))); +end +``` +""" diff --git a/changelog/5.2.0-rc.1/pr-15688.toml b/changelog/5.2.0-rc.1/pr-15688.toml new file mode 100644 index 000000000000..e119eeea6587 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15688.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixing search creation when replaying events/event definitions." + +pulls = ["15688"] +issues = ["15552", "15689"] diff --git a/changelog/5.2.0-rc.1/pr-15738.toml b/changelog/5.2.0-rc.1/pr-15738.toml new file mode 100644 index 000000000000..f13270689532 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15738.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implementing `percentage` metric for aggregations." 
+ +pulls = ["15738"] +issues = ["6763"] diff --git a/changelog/5.2.0-rc.1/pr-15750.toml b/changelog/5.2.0-rc.1/pr-15750.toml new file mode 100644 index 000000000000..27ec746a1c28 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15750.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix enabled_index_rotation_strategies configuration to allow setting of TimeBasedSizeOptimizingStrategy" + +pulls = ["15750"] diff --git a/changelog/5.2.0-rc.1/pr-15785.toml b/changelog/5.2.0-rc.1/pr-15785.toml new file mode 100644 index 000000000000..40ec4580d745 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15785.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Sort function names, add rule builder fragments with implicit to_string, fix string parameter handling in fragments." + +pulls = ["15785"] diff --git a/changelog/5.2.0-rc.1/pr-15793.toml b/changelog/5.2.0-rc.1/pr-15793.toml new file mode 100644 index 000000000000..4596305b5edb --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15793.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed query decoration in aggregation alerts." + +pulls = ["15793"] +issues = ["15737"] diff --git a/changelog/5.2.0-rc.1/pr-15808.toml b/changelog/5.2.0-rc.1/pr-15808.toml new file mode 100644 index 000000000000..6ae7e7a6ca33 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15808.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add action to cleanup & rebuild index ranges of all index sets." + +issues = ["11919"] +pulls = ["15808"] diff --git a/changelog/5.2.0-rc.1/pr-15814.toml b/changelog/5.2.0-rc.1/pr-15814.toml new file mode 100644 index 000000000000..0b889cedd157 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15814.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding values pivot option to disable empty buckets." + +pulls = ["15814"] diff --git a/changelog/5.2.0-rc.1/pr-15863.toml b/changelog/5.2.0-rc.1/pr-15863.toml new file mode 100644 index 000000000000..e20af562e6de --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15863.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Enabling `percentage`/`percentile` metrics in event definitions." + +pulls = ["15863"] +issues = ["10082"] diff --git a/changelog/5.2.0-rc.1/pr-15869.toml b/changelog/5.2.0-rc.1/pr-15869.toml new file mode 100644 index 000000000000..a5d60a852e5c --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15869.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Force user to select an entity for when picking a start page type." + +issues = ["15578"] +pulls = ["15869"] diff --git a/changelog/5.2.0-rc.1/pr-15889.toml b/changelog/5.2.0-rc.1/pr-15889.toml new file mode 100644 index 000000000000..637c96261162 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15889.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fix problem with STOPPED reactor on OutOfMemoryError. This error stops the whole GL instance now, instead of constantly logging errors related to stopped reactor." + +issues = ["graylog-plugin-enterprise#4980"] +pulls = ["15889"] diff --git a/changelog/5.2.0-rc.1/pr-15948.toml b/changelog/5.2.0-rc.1/pr-15948.toml new file mode 100644 index 000000000000..0d3931126463 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-15948.toml @@ -0,0 +1,5 @@ +type = "c" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Using composable index templates instead of legacy templates." 
+ +issues = ["15782"] +pulls = ["15948"] diff --git a/changelog/5.2.0-rc.1/pr-16019.toml b/changelog/5.2.0-rc.1/pr-16019.toml new file mode 100644 index 000000000000..23de8c80133f --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16019.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Mark required fields in body of API call `PUT roles/{rolename}`" + +issues = ["16019"] +pull = ["16046"] diff --git a/changelog/5.2.0-rc.1/pr-16035.toml b/changelog/5.2.0-rc.1/pr-16035.toml new file mode 100644 index 000000000000..1f5ce742a0a8 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16035.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add event limit for filter event definition." + +issues = ["graylog-plugin-enterprise-integrations#5382"] +pulls = ["16035"] diff --git a/changelog/5.2.0-rc.1/pr-16050.toml b/changelog/5.2.0-rc.1/pr-16050.toml new file mode 100644 index 000000000000..9446787e5b60 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16050.toml @@ -0,0 +1,5 @@ +type = "c" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "When entity was added to the favorites, it was added to the end of the list. It meant that top of the favorites' list contained entities that had been added first to the list. It seemed wrong. From now on, the newly added entity will be shown on the top of the favorites' list." + +issues = [""] +pulls = ["16050"] diff --git a/changelog/5.2.0-rc.1/pr-16051.toml b/changelog/5.2.0-rc.1/pr-16051.toml new file mode 100644 index 000000000000..1d40c547addb --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16051.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Include parameters when replaying search for dashboard widget." + +issues = ["graylog-plugin-enterprise#4576"] +pulls = ["16051"] diff --git a/changelog/5.2.0-rc.1/pr-16072.toml b/changelog/5.2.0-rc.1/pr-16072.toml new file mode 100644 index 000000000000..c420821a9ce1 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16072.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix event definition enable UI state for Filter & Aggregation." + +issues = ["#16070"] +pulls = ["16072"] diff --git a/changelog/5.2.0-rc.1/pr-16092.toml b/changelog/5.2.0-rc.1/pr-16092.toml new file mode 100644 index 000000000000..69025feb6582 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16092.toml @@ -0,0 +1,13 @@ +type = "fixed" +message = "Fix internal server error for cluster logger level API requests." + +issues = ["11408", "14256"] +pulls = ["16092"] + +details.user = """ +Requests to the cluster logger-level endpoint `/cluster/system/loggers/{loggerName}/level/{level}` previously responded +with a `500` error status (but still executed successfully) unless the request included the `Accept: application/json` +header. + +This API endpoint has been fixed to respond with a `200` success code even when the `Accept` header is not supplied. +""" diff --git a/changelog/5.2.0-rc.1/pr-16095.toml b/changelog/5.2.0-rc.1/pr-16095.toml new file mode 100644 index 000000000000..52a7a0a817de --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16095.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Implement option to change order of search highlighting rules." 
+ +pulls = ["16095"] diff --git a/changelog/5.2.0-rc.1/pr-16134.toml b/changelog/5.2.0-rc.1/pr-16134.toml new file mode 100644 index 000000000000..d304bbee2e62 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16134.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Mark users synced froma a deactivated authorization backend in user overview list and on user detail page." + +issues = ["13823"] +pulls = ["16134"] diff --git a/changelog/5.2.0-rc.1/pr-16153.toml b/changelog/5.2.0-rc.1/pr-16153.toml new file mode 100644 index 000000000000..144c1842acee --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16153.toml @@ -0,0 +1,3 @@ +type = "r" +message = "Remove obsolete `ElasticsearchConfigMigration` and `AddRetentionRotationAndDefaultFlagToIndexSetMigration` migrations." +pulls = ["16153"] diff --git a/changelog/5.2.0-rc.1/pr-16161.toml b/changelog/5.2.0-rc.1/pr-16161.toml new file mode 100644 index 000000000000..0a7434048a64 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16161.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add pipeline name and stage to log output when an exception occurs during pipeline processing." + +issues = ["8627"] +pulls = ["16161"] diff --git a/changelog/5.2.0-rc.1/pr-16166.toml b/changelog/5.2.0-rc.1/pr-16166.toml new file mode 100644 index 000000000000..26880cc3bc64 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16166.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Make `creation_date` optional when creating index sets via api. If none is provided, the current server time will be used." + +issues = ["4709"] +pulls = ["16166"] diff --git a/changelog/5.2.0-rc.1/pr-16175.toml b/changelog/5.2.0-rc.1/pr-16175.toml new file mode 100644 index 000000000000..0fbec9f318ba --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16175.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Fix possible NullPointerExceptions when writing pipeline rules with maps." + +pulls = ["16175"] +issues = ["14818"] + diff --git a/changelog/5.2.0-rc.1/pr-16184.toml b/changelog/5.2.0-rc.1/pr-16184.toml new file mode 100644 index 000000000000..3ba97568dec9 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16184.toml @@ -0,0 +1,4 @@ +type = "r" +message = "Remove ERROR level logs that happened during user/role creation." + +pulls = ["16184"] diff --git a/changelog/5.2.0-rc.1/pr-16195.toml b/changelog/5.2.0-rc.1/pr-16195.toml new file mode 100644 index 000000000000..e9a5a12a7f86 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16195.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Added a new Event Definition column: Last Matched." + +issues = ["15751"] +pulls = ["16195"] diff --git a/changelog/5.2.0-rc.1/pr-16196.toml b/changelog/5.2.0-rc.1/pr-16196.toml new file mode 100644 index 000000000000..3c2ef182b073 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16196.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Show syntax error in pipeline rule editor when trying to access nested variables using an array literal, e.g. `result.[\"key\"]`." + +pulls = ["16196"] +issues = ["16029"] diff --git a/changelog/5.2.0-rc.1/pr-16199.toml b/changelog/5.2.0-rc.1/pr-16199.toml new file mode 100644 index 000000000000..7475d8e7d3fa --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16199.toml @@ -0,0 +1,6 @@ +type = "c" +message = "Introduce a cache configuration setting to avoid caching empty values." 
+ +issues = ["13579", "15200"] +pulls = ["16199"] + diff --git a/changelog/5.2.0-rc.1/pr-16241.toml b/changelog/5.2.0-rc.1/pr-16241.toml new file mode 100644 index 000000000000..defc1c4cfe52 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16241.toml @@ -0,0 +1,4 @@ +type = "d" +message = "Adding REST endpoint to create custom field type mappings for index sets." + +pulls = ["16241"] diff --git a/changelog/5.2.0-rc.1/pr-16251.toml b/changelog/5.2.0-rc.1/pr-16251.toml new file mode 100644 index 000000000000..7d8a73a17c60 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16251.toml @@ -0,0 +1,6 @@ +type = "a" +message = "Add API endpoint for event definition duplication." + +issues = [""] +pulls = ["16251"] + diff --git a/changelog/5.2.0-rc.1/pr-16274.toml b/changelog/5.2.0-rc.1/pr-16274.toml new file mode 100644 index 000000000000..272849d63460 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16274.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Merging integrations plugin into core repository." + +pulls = ["16274"] diff --git a/changelog/5.2.0-rc.1/pr-16317.toml b/changelog/5.2.0-rc.1/pr-16317.toml new file mode 100644 index 000000000000..ad23f9fd330b --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16317.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Introduce telemetry custom event name" + +pulls = ["16317"] diff --git a/changelog/5.2.0-rc.1/pr-16334.toml b/changelog/5.2.0-rc.1/pr-16334.toml new file mode 100644 index 000000000000..d757102df509 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16334.toml @@ -0,0 +1,9 @@ +type = "added" +message = "DataNode: if you use the DataNode, the nodes system clocks have to be synchronized for JWT usage." + +pulls = ["16334"] + +details.user = """ +JWT usage over several systems/nodes requires clock synchronization to prevent errors with expiration time and +clock skew between client/server. +""" diff --git a/changelog/5.2.0-rc.1/pr-16344.toml b/changelog/5.2.0-rc.1/pr-16344.toml new file mode 100644 index 000000000000..0d4754c47ff4 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16344.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add change field type action for a field" + +pulls = ["16344"] +issues = ["16469"] diff --git a/changelog/5.2.0-rc.1/pr-16377-snakeyaml.toml b/changelog/5.2.0-rc.1/pr-16377-snakeyaml.toml new file mode 100644 index 000000000000..1fb0b67678cd --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16377-snakeyaml.toml @@ -0,0 +1,4 @@ +type = "s" +message = "Update SnakeYAML to version 2.2. (fixes CVE-2022-1471)" + +pulls = ["16377"] diff --git a/changelog/5.2.0-rc.1/pr-16377.toml b/changelog/5.2.0-rc.1/pr-16377.toml new file mode 100644 index 000000000000..aa9a7b76765c --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16377.toml @@ -0,0 +1,5 @@ +type = "s" +message = "Update Jackson to 2.15.2. (CVE-2022-42003)" + +issues = [""] +pulls = ["16377"] diff --git a/changelog/5.2.0-rc.1/pr-16407.toml b/changelog/5.2.0-rc.1/pr-16407.toml new file mode 100644 index 000000000000..3510f68c0825 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16407.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix blocked system events index preventing node registration and leader election." + +issues = ["Graylog2/graylog-plugin-enterprise#5750"] +pulls = ["16407"] diff --git a/changelog/5.2.0-rc.1/pr-16426.toml b/changelog/5.2.0-rc.1/pr-16426.toml new file mode 100644 index 000000000000..40cf908d1de4 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16426.toml @@ -0,0 +1,5 @@ +type = "changed" +message = "Publish system events asynchronously to prevent threads getting blocked." 
+ +issues = [""] +pulls = ["16426"] diff --git a/changelog/5.2.0-rc.1/pr-16456.toml b/changelog/5.2.0-rc.1/pr-16456.toml new file mode 100644 index 000000000000..d21be2a3023b --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16456.toml @@ -0,0 +1,4 @@ +type = "s" +message = "Update Scala to 2.13.12 to include fix of CVE-2022-36944." + +pulls = ["16456"] diff --git a/changelog/5.2.0-rc.1/pr-16475.toml b/changelog/5.2.0-rc.1/pr-16475.toml new file mode 100644 index 000000000000..20e80e4efb4f --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16475.toml @@ -0,0 +1,4 @@ +type = "f" +message = "DNS Resolver Pool warning log message now accurately reflects configured `dns_lookup_adapter_resolver_pool_size` value." + +pulls = ["16475"] \ No newline at end of file diff --git a/changelog/5.2.0-rc.1/pr-16476.toml b/changelog/5.2.0-rc.1/pr-16476.toml new file mode 100644 index 000000000000..840cdf606707 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16476.toml @@ -0,0 +1,22 @@ +type = "security" +message = "Update various third-party libraries to address vulnerabilites." + +pulls = ["16476"] + +details.user = """ +Updates third-party libraries to address the following vulnerabilities: + + - [CVE-2022-36944](https://www.cve.org/CVERecord?id=CVE-2022-36944) + - [CVE-2023-34455](https://www.cve.org/CVERecord?id=CVE-2023-34455) + - [CVE-2021-22569](https://www.cve.org/CVERecord?id=CVE-2021-22569) + - [CVE-2019-10086](https://www.cve.org/CVERecord?id=CVE-2019-10086) + - [CVE-2021-28170](https://www.cve.org/CVERecord?id=CVE-2021-28170) + - [CVE-2021-29425](https://www.cve.org/CVERecord?id=CVE-2021-29425) + - [CVE-2023-3635](https://www.cve.org/CVERecord?id=CVE-2023-3635) + - [CVE-2022-25647](https://www.cve.org/CVERecord?id=CVE-2022-25647) + - [CVE-2023-34462](https://www.cve.org/CVERecord?id=CVE-2023-34462) + - [CVE-2023-33201](https://www.cve.org/CVERecord?id=CVE-2023-33201) + - [CVE-2020-10693](https://www.cve.org/CVERecord?id=CVE-2020-10693) + - [CVE-2021-37533](https://www.cve.org/CVERecord?id=CVE-2021-37533) + - [CVE-2023-2976](https://www.cve.org/CVERecord?id=CVE-2023-2976) +""" diff --git a/changelog/5.2.0-rc.1/pr-16481.toml b/changelog/5.2.0-rc.1/pr-16481.toml new file mode 100644 index 000000000000..697dbddec5ea --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16481.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Indexer cluster checker thread start is delayed by 5s to avoid potential deadlock." + +pulls = ["16481"] diff --git a/changelog/5.2.0-rc.1/pr-16503.toml b/changelog/5.2.0-rc.1/pr-16503.toml new file mode 100644 index 000000000000..37858e030fe9 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16503.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixes failure to delete indices with time/size optimizing strategy, when write block is not present." + +issues = ["Graylog2/graylog-plugin-enterprise#5722"] +pulls = ["16503"] diff --git a/changelog/5.2.0-rc.1/pr-16514.toml b/changelog/5.2.0-rc.1/pr-16514.toml new file mode 100644 index 000000000000..11f029b8e8b2 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16514.toml @@ -0,0 +1,13 @@ +type = "a" +message = "Added ability to clean field names for set_field and set_fields pipeline functions" + +issues = ["7137", "graylog-plugin-enterprise#5794"] +pulls = ["16514"] + +details.user = """ +Add optional parameters to the following pipeline functions that can be used to replace invalid field name characters +with underscores. Both default to `false`. 
+ +* `set_field` pipeline function: New parameter: `clean_field` +* `set_fields` pipeline function: New parameter: `clean_fields` +""" diff --git a/changelog/5.2.0-rc.1/pr-16563.toml b/changelog/5.2.0-rc.1/pr-16563.toml new file mode 100644 index 000000000000..fdf0bddcb431 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16563.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixing display of session timeout unit on user edit page." + +pulls = ["16563"] diff --git a/changelog/5.2.0-rc.1/pr-16668.toml b/changelog/5.2.0-rc.1/pr-16668.toml new file mode 100644 index 000000000000..de816ba25d90 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16668.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix description for the scenario that one term or value has multiple matching highlighting rules." + +pulls = ["16668"] diff --git a/changelog/5.2.0-rc.1/pr-16722.toml b/changelog/5.2.0-rc.1/pr-16722.toml new file mode 100644 index 000000000000..ea7c662893c6 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16722.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Enabling pre-flight UI for general availability." + +pulls = ["16722"] diff --git a/changelog/5.2.0-rc.1/pr-16773.toml b/changelog/5.2.0-rc.1/pr-16773.toml new file mode 100644 index 000000000000..80cdec2a5c23 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16773.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Enabling field type management for general availability." + +pulls = ["16773"] diff --git a/changelog/5.2.0-rc.1/pr-16902.toml b/changelog/5.2.0-rc.1/pr-16902.toml new file mode 100644 index 000000000000..9227a9e5ff35 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16902.toml @@ -0,0 +1,4 @@ +type = "s" +message = "Update Netty to 4.1.100 to fix the [HTTP/2 Rapid Reset attack](https://blog.cloudflare.com/technical-breakdown-http2-rapid-reset-ddos-attack/). [GHSA-xpw8-rcwv-8f8p](https://github.com/netty/netty/security/advisories/GHSA-xpw8-rcwv-8f8p) The security vulnerability affects all Graylog users that run the Graylog Forwarder input on their servers." + +pulls = ["16902"] diff --git a/changelog/5.2.0-rc.1/pr-16927.toml b/changelog/5.2.0-rc.1/pr-16927.toml new file mode 100644 index 000000000000..732e003049cc --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16927.toml @@ -0,0 +1,5 @@ +type = "f" +message = "By default, 'none' retention strategy is now disabled for new installations." + +issues = ["graylog-plugin-enterprise#5894"] +pulls = ["16927"] diff --git a/changelog/5.2.0-rc.1/pr-16962.toml b/changelog/5.2.0-rc.1/pr-16962.toml new file mode 100644 index 000000000000..3ca30a34cd84 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16962.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Allow logging metrics to include DEBUG/TRACE level." + +issues = ["10866"] +pulls = ["16962"] diff --git a/changelog/5.2.0-rc.1/pr-16999.toml b/changelog/5.2.0-rc.1/pr-16999.toml new file mode 100644 index 000000000000..50a7b9512e5a --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-16999.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Aligning order of precedence of auth info in proxied resources with general authentication logic." + +issues = ["16985"] +pulls = ["16999"] diff --git a/changelog/5.2.0-rc.1/pr-17012.toml b/changelog/5.2.0-rc.1/pr-17012.toml new file mode 100644 index 000000000000..38505b1a8dfd --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-17012.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Change loggers in indexer subsystem to output all messages related to indexing/searching." 
+ +issues = ["17011"] +pulls = ["17012"] diff --git a/changelog/5.2.0-rc.1/pr-integrations-1340.toml b/changelog/5.2.0-rc.1/pr-integrations-1340.toml new file mode 100644 index 000000000000..0d2981dbb0e8 --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-integrations-1340.toml @@ -0,0 +1,11 @@ +type = "d" +message = "GreyNoise Data Adapter functionality has been limited to only use with non-community GreyNoise subscriptions." + +issues = ["Graylog2/graylog-plugin-enterprise#5157"] +pulls = ["Graylog2/graylog-plugin-integrations#1340", "Graylog2/graylog-plugin-enterprise-integrations#1059", "Graylog2/graylog2-server#15592"] +details.user = """ +- GreyNoise Community IP Lookup Data Adapters have been marked as deprecated. Existing Data Adapters can no longer be + started or lookups performed. +- GreyNoise Full IP Lookup [Enterprise] Data Adapter can no longer be used with a free GreyNoise Community API tokens. +- GreyNoise Quick IP Lookup Data Adapter can no longer be used with a free GreyNoise Community API tokens. +""" diff --git a/changelog/5.2.0-rc.1/pr-integrations-1348.toml b/changelog/5.2.0-rc.1/pr-integrations-1348.toml new file mode 100644 index 000000000000..5902a5fd885c --- /dev/null +++ b/changelog/5.2.0-rc.1/pr-integrations-1348.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Include link to replay an alert in teams and slack notification template." + +issues = ["Graylog2/graylog-server#15678"] +pulls = ["Graylog2/graylog-plugin-integrations#1348"] diff --git a/changelog/6.0.0-rc.1/.gitkeep b/changelog/6.0.0-rc.1/.gitkeep new file mode 100644 index 000000000000..900f0cb27b2e --- /dev/null +++ b/changelog/6.0.0-rc.1/.gitkeep @@ -0,0 +1 @@ +# Keep the directory in Git \ No newline at end of file diff --git a/changelog/6.0.0-rc.1/ghsa-3xf8-g8gr-g7rh.toml b/changelog/6.0.0-rc.1/ghsa-3xf8-g8gr-g7rh.toml new file mode 100644 index 000000000000..9cf6da1ed33c --- /dev/null +++ b/changelog/6.0.0-rc.1/ghsa-3xf8-g8gr-g7rh.toml @@ -0,0 +1,2 @@ +type = "security" +message = "Always create new sessions for authentication attempts to fix a potential session fixation vulnerability. [GHSA-3xf8-g8gr-g7rh](https://github.com/Graylog2/graylog2-server/security/advisories/GHSA-3xf8-g8gr-g7rh)" diff --git a/changelog/6.0.0-rc.1/issue-13499.toml b/changelog/6.0.0-rc.1/issue-13499.toml new file mode 100644 index 000000000000..d98f6ed8fa8e --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-13499.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Provide more diagnostic information when stream rule fails." + +pulls = ["18539"] +issues=["13499"] diff --git a/changelog/6.0.0-rc.1/issue-13673.toml b/changelog/6.0.0-rc.1/issue-13673.toml new file mode 100644 index 000000000000..f552b0444c8f --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-13673.toml @@ -0,0 +1,5 @@ +type = "c" +message = "The default value for the `data_dir` configuration option has been removed. It needs to be configured explicitly now." + +issues = ["13673"] +pulls = ["15902"] diff --git a/changelog/6.0.0-rc.1/issue-13856.toml b/changelog/6.0.0-rc.1/issue-13856.toml new file mode 100644 index 000000000000..6bb6103c8863 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-13856.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix problems with unknown-field validation warnings on fields like gl2_source_input. From now on, Query Validation never treats special GL fields as unknown." 
+ +issues = ["13856"] +pulls = ["17515"] diff --git a/changelog/6.0.0-rc.1/issue-14691.toml b/changelog/6.0.0-rc.1/issue-14691.toml new file mode 100644 index 000000000000..ebf892fc3764 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-14691.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added support for sending HTTP Notification API Key/Secret as a header" + +issues = ["14691"] +pulls = ["17369"] diff --git a/changelog/6.0.0-rc.1/issue-16096.toml b/changelog/6.0.0-rc.1/issue-16096.toml new file mode 100644 index 000000000000..3b05e7e74e8a --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16096.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix timezone issue with date picker, which resulted in highlighting the wrong selected day." + +issues = ["16096"] +pulls = ["16973"] diff --git a/changelog/6.0.0-rc.1/issue-16284.toml b/changelog/6.0.0-rc.1/issue-16284.toml new file mode 100644 index 000000000000..e26c7869ef28 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16284.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Store message receive and processing times in ES/OS fields." + +issues = ["16284"] +pulls = [""] diff --git a/changelog/6.0.0-rc.1/issue-16388.toml b/changelog/6.0.0-rc.1/issue-16388.toml new file mode 100644 index 000000000000..ebce7de2eac1 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16388.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "debounce Query Change in Alerts & Events" + +issues = ["16388"] +pulls = ["17987"] diff --git a/changelog/6.0.0-rc.1/issue-16428.toml b/changelog/6.0.0-rc.1/issue-16428.toml new file mode 100644 index 000000000000..3f0e1abd3071 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16428.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Set `SameSite` attribute in cookies." + +issues = ["16428"] +pulls = ["18329"] diff --git a/changelog/6.0.0-rc.1/issue-16885.toml b/changelog/6.0.0-rc.1/issue-16885.toml new file mode 100644 index 000000000000..13c5e4b678b1 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16885.toml @@ -0,0 +1,19 @@ +type = "changed" +message = "Changed Content Packs handling to allow import/export of entites that reference Streams by title." + +pulls = ["16743"] + +details.user = """ +Previously it was not possible to create a Content Pack with Stream scoped entities without also exporting the referenced Stream, +and potentially duplicating the Stream. + +For example, if a user had a Dashboard with a widget that was scoped to "stream_xyz" that they wished to create a +Content Pack with to use on another system, there were two options: +- Remove the Stream from the Dashboard widget before export, then re-associate the Stream with the Dashboard after uploading. +- Export the Stream along with the Dashboard, in which case a new "stream_xyz" would be created on the uploading system + (whether it already existed or not). + +This change allows users to create a Content Pack with a "stream_xyz" scoped Dashboard, referencing the Stream by title only. +When uploaded and installed, the Content Pack will resolve the existing stream with title "stream_xyz", +and associate it to the new Dashboard. 
+""" diff --git a/changelog/6.0.0-rc.1/issue-16980.toml b/changelog/6.0.0-rc.1/issue-16980.toml new file mode 100644 index 000000000000..82103b1f5c07 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16980.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix rule builder dropdown UX" + +issues = ["16980"] +pulls = ["17919"] diff --git a/changelog/6.0.0-rc.1/issue-16988.toml b/changelog/6.0.0-rc.1/issue-16988.toml new file mode 100644 index 000000000000..01536a155651 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-16988.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added the Email Notification ID to the System Notification for Missing Email Recipients." + +issues = ["16988"] +pulls = ["17061"] diff --git a/changelog/6.0.0-rc.1/issue-17040.toml b/changelog/6.0.0-rc.1/issue-17040.toml new file mode 100644 index 000000000000..5667a6ecf705 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17040.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Enable sidecar default configurations to be updated on existing installs." + +issues = ["17040"] +pulls = ["17246"] diff --git a/changelog/6.0.0-rc.1/issue-17082.toml b/changelog/6.0.0-rc.1/issue-17082.toml new file mode 100644 index 000000000000..b6cf35d0389d --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17082.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added a keyboard shortcuts page" + +issues = ["17082"] +pulls = ["17083"] diff --git a/changelog/6.0.0-rc.1/issue-17085.toml b/changelog/6.0.0-rc.1/issue-17085.toml new file mode 100644 index 000000000000..9164ed0ba1b6 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17085.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Move existing DataNode config into separate page." + +issues = ["17085"] +pulls = ["17104"] diff --git a/changelog/6.0.0-rc.1/issue-17204.toml b/changelog/6.0.0-rc.1/issue-17204.toml new file mode 100644 index 000000000000..6a743d9bf955 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17204.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix tooltip colors on Rule Editor." + +issues = ["17204"] +pulls = ["17220"] diff --git a/changelog/6.0.0-rc.1/issue-17247.toml b/changelog/6.0.0-rc.1/issue-17247.toml new file mode 100644 index 000000000000..7f68f37fd710 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17247.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Data nodes now publish state machine state. Provides a backend for data node table including state information." +issues = ["17247"] +pulls = ["17382"] + diff --git a/changelog/6.0.0-rc.1/issue-17261.toml b/changelog/6.0.0-rc.1/issue-17261.toml new file mode 100644 index 000000000000..4d799864908f --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17261.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixing problem with persisting selected page size for some paginated lists." +issues = ["17261"] +pulls = ["17278"] diff --git a/changelog/6.0.0-rc.1/issue-17284.toml b/changelog/6.0.0-rc.1/issue-17284.toml new file mode 100644 index 000000000000..b9f784837c5f --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17284.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add action for field type management page to remove custom field types." 
+ +issues = ["17284"] +pulls = ["17281"] diff --git a/changelog/6.0.0-rc.1/issue-17285.toml b/changelog/6.0.0-rc.1/issue-17285.toml new file mode 100644 index 000000000000..c9c7145945c3 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17285.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add filtering to index field types page" + +issues = ["17285"] +pulls = ["17326"] diff --git a/changelog/6.0.0-rc.1/issue-17286.toml b/changelog/6.0.0-rc.1/issue-17286.toml new file mode 100644 index 000000000000..5ad2b85e4e17 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17286.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added remove custom field type bulk action to index set field type page" + +issues = ["17286"] +pulls = ["17429"] diff --git a/changelog/6.0.0-rc.1/issue-17314.toml b/changelog/6.0.0-rc.1/issue-17314.toml new file mode 100644 index 000000000000..f1578b187618 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17314.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed event aggregation handling when aggregating on missing fields" + +issues = ["17314"] +pulls = ["17604"] diff --git a/changelog/6.0.0-rc.1/issue-17343.toml b/changelog/6.0.0-rc.1/issue-17343.toml new file mode 100644 index 000000000000..f5a849255b0e --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17343.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add option to add custom field type to non existing field. Add option to change field type with option to choose field from the list" + +issues = ["17343"] +pulls = ["17408"] diff --git a/changelog/6.0.0-rc.1/issue-17367.toml b/changelog/6.0.0-rc.1/issue-17367.toml new file mode 100644 index 000000000000..580ac755cb54 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17367.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Improve migration and error handling for event definitions that are missing a field value." + +issues = ["17367"] +pulls = [""] diff --git a/changelog/6.0.0-rc.1/issue-17385.toml b/changelog/6.0.0-rc.1/issue-17385.toml new file mode 100644 index 000000000000..2c42cfaa51c6 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17385.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix preflight basic auth prompt when password has not been entered" + +issues = ["17385"] +pulls = ["17405"] diff --git a/changelog/6.0.0-rc.1/issue-17424.toml b/changelog/6.0.0-rc.1/issue-17424.toml new file mode 100644 index 000000000000..f62fb71725fe --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17424.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Datanode preflight check for data directory compatibility" + +issues = ["17424"] +pulls = ["17487"] diff --git a/changelog/6.0.0-rc.1/issue-17430.toml b/changelog/6.0.0-rc.1/issue-17430.toml new file mode 100644 index 000000000000..cfc0a0e8cb66 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17430.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Add Data Node pre-flight check to validate the `vm.max_map_count` sysctl value." + +issues = ["17430"] +pulls = ["17436"] diff --git a/changelog/6.0.0-rc.1/issue-17450.toml b/changelog/6.0.0-rc.1/issue-17450.toml new file mode 100644 index 000000000000..1f65b69c300e --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17450.toml @@ -0,0 +1,19 @@ +type = "changed" +message = "Automatically choose default number of process-buffer and output-buffer processors based on available CPU cores." + +issues = ["17450"] +pulls = ["17737"] + +details.user = """ +The default values for the configuration settings `processbuffer_processors` and `outputbuffer_processors` have been +changed. 
The values will now be calculated based on the number of CPU cores available to the JVM. If you have not +explicitly set values for these settings in your configuration file, the new defaults apply. + +The new defaults should improve performance of your system, however, if you want to continue running your system with +the previous defaults, please add the following settings to your configuration file: + +``` +processbuffer_processors = 5 +outputbuffer_processors = 3 +``` +""" diff --git a/changelog/6.0.0-rc.1/issue-17464.toml b/changelog/6.0.0-rc.1/issue-17464.toml new file mode 100644 index 000000000000..afa8b422592c --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17464.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Pipeline rule simulator - JSON inside message" + +issues = ["17464"] +pulls = ["17817"] diff --git a/changelog/6.0.0-rc.1/issue-17523.toml b/changelog/6.0.0-rc.1/issue-17523.toml new file mode 100644 index 000000000000..7347e3e4afed --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17523.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Ensure password secret meets the minimum length requirement if using/for the DataNode." + +issues = ["17523"] +pulls = ["17719"] diff --git a/changelog/6.0.0-rc.1/issue-17526.toml b/changelog/6.0.0-rc.1/issue-17526.toml new file mode 100644 index 000000000000..e314cf5e1b3e --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17526.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix filebeat path for sidecar default templates and default configurations." + +issues = ["17526"] +pulls = ["17624"] diff --git a/changelog/6.0.0-rc.1/issue-17570.toml b/changelog/6.0.0-rc.1/issue-17570.toml new file mode 100644 index 000000000000..573b3376cf13 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17570.toml @@ -0,0 +1,5 @@ +type = "r" +message = "The close index button has been removed from the index set overview page for a specific index." + +issues = ["17570"] +pulls = ["18037"] diff --git a/changelog/6.0.0-rc.1/issue-17619.toml b/changelog/6.0.0-rc.1/issue-17619.toml new file mode 100644 index 000000000000..1d09e1c2736d --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17619.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix ineffective search query after exporting search to dashboard" + +issues = ["17619"] +pulls = ["17621"] diff --git a/changelog/6.0.0-rc.1/issue-17647.toml b/changelog/6.0.0-rc.1/issue-17647.toml new file mode 100644 index 000000000000..7f9a1ea975e3 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17647.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Updated the select_jsonpath pipeline function to accept JSON strings as the `json` parameter in addition to parsed JsonNode objects." + +issues = ["17647"] +pulls = ["17683"] diff --git a/changelog/6.0.0-rc.1/issue-17649.toml b/changelog/6.0.0-rc.1/issue-17649.toml new file mode 100644 index 000000000000..cca2bba57401 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17649.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Provide more verbose error messages for HTTPJSONPath data adapter." 
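As a sketch of the `select_jsonpath` change from issue-17647 above, the following rule assumes the `json` parameter now accepts a raw JSON string in addition to a parsed JsonNode; the field name `payload` and its content are assumptions for the example.

```
rule "extract user name from JSON payload"
when
  has_field("payload")
then
  // payload is assumed to hold a JSON string such as {"user":{"name":"jane"}}
  let extracted = select_jsonpath(to_string($message.payload), { user_name: "$.user.name" });
  set_fields(extracted);
end
```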
+ +issues = ["17649"] +pulls = ["17801"] diff --git a/changelog/6.0.0-rc.1/issue-17704.toml b/changelog/6.0.0-rc.1/issue-17704.toml new file mode 100644 index 000000000000..1cddc6a5eb94 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17704.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Datanode Cluster Management UI" + +issues = ["17704"] +pulls = ["17881"] diff --git a/changelog/6.0.0-rc.1/issue-17705.toml b/changelog/6.0.0-rc.1/issue-17705.toml new file mode 100644 index 000000000000..0659d08686db --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17705.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "added actions on datanode datails page" + +issues = ["17705"] +pulls = ["17840"] diff --git a/changelog/6.0.0-rc.1/issue-17728.toml b/changelog/6.0.0-rc.1/issue-17728.toml new file mode 100644 index 000000000000..da427ba568de --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17728.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fields `gl2_message_id` and `streams` are now reserved and cannot be changed by custom mappings. No reserved field can be changed when you manually change custom mappings in MongoDB - it will be ignored." + +issues = ["17728"] +pulls = ["17766"] diff --git a/changelog/6.0.0-rc.1/issue-17746.toml b/changelog/6.0.0-rc.1/issue-17746.toml new file mode 100644 index 000000000000..637bbabc90ef --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17746.toml @@ -0,0 +1,12 @@ +type = "a" +message = "Add index set field type profiles overview and edit page" + +pulls = ["17775"] +issues=["17746"] + +details.user = """ +Before this change, it was possible to create and manage custom field type mappings +of existing index sets. For every new index set that is created, +these steps have to be repeated though. This change gives an option to bundle up +custom field types into profiles. +""" diff --git a/changelog/6.0.0-rc.1/issue-17748.toml b/changelog/6.0.0-rc.1/issue-17748.toml new file mode 100644 index 000000000000..4cfce6cd454b --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17748.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add action to profile creation from selected types" + +pulls = ["18027"] +issues=["17746"] diff --git a/changelog/6.0.0-rc.1/issue-17749.toml b/changelog/6.0.0-rc.1/issue-17749.toml new file mode 100644 index 000000000000..16b63a6c8b69 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17749.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add deleting index set field type profile action" + +issues = ["17815"] +pulls = ["18031"] diff --git a/changelog/6.0.0-rc.1/issue-17815.toml b/changelog/6.0.0-rc.1/issue-17815.toml new file mode 100644 index 000000000000..f84df47264a2 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17815.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Allow the index range clean up periodcal to delete index ranges that are no longer managed by an index set" + +issues = ["17815"] +pulls = ["17841"] diff --git a/changelog/6.0.0-rc.1/issue-17882.toml b/changelog/6.0.0-rc.1/issue-17882.toml new file mode 100644 index 000000000000..a845faa81b6d --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17882.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed error when attempting to share entities with stream dependencies." 
+ +issues = ["17882"] +pulls = ["17891"] diff --git a/changelog/6.0.0-rc.1/issue-17892.toml b/changelog/6.0.0-rc.1/issue-17892.toml new file mode 100644 index 000000000000..aa1b48086bae --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17892.toml @@ -0,0 +1,11 @@ +type = "fixed" +message = """ +Corrected parameter description for handle_dup_keys in the pipline rule key_value function. +If not specified, the function defaults to using the first encountered value for duplicate keys. +Fixed default values when using the rule builder. +""" + +issues = ["17892"] +pulls = ["17969"] + + diff --git a/changelog/6.0.0-rc.1/issue-17934.toml b/changelog/6.0.0-rc.1/issue-17934.toml new file mode 100644 index 000000000000..e921863ed4a1 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17934.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add new column 'Origin' to index set field type mapping page instead of is custom" + +pulls = ["17878"] +issues=["17934"] diff --git a/changelog/6.0.0-rc.1/issue-17958.toml b/changelog/6.0.0-rc.1/issue-17958.toml new file mode 100644 index 000000000000..fe1ae987fcb0 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17958.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix issue where enabled event definitions would be incorrectly set to disabled through the REST API." + +issues = ["17958"] +pulls = ["17959"] diff --git a/changelog/6.0.0-rc.1/issue-17994.toml b/changelog/6.0.0-rc.1/issue-17994.toml new file mode 100644 index 000000000000..10e50a78cc5d --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-17994.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Add ability to specify a TTL for null values in lookup table caches." + +pulls = ["17994"] +issues=["16466"] diff --git a/changelog/6.0.0-rc.1/issue-18000.toml b/changelog/6.0.0-rc.1/issue-18000.toml new file mode 100644 index 000000000000..086ddfb64c00 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18000.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixing UI runtime error when request for alerts overview returns an error." + +pulls=["18000"] +issues=["17995"] diff --git a/changelog/6.0.0-rc.1/issue-18002.toml b/changelog/6.0.0-rc.1/issue-18002.toml new file mode 100644 index 000000000000..2b1650079e62 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18002.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = "Fixes MoreSearch, so that it does not throw errors when affected indices are null or empty. Because of that, MoreSearch is safer to use on fresh installation, with no events and therefore no index ranges for events indices." + +issues = ["18002"] +pulls = ["18023"] + + diff --git a/changelog/6.0.0-rc.1/issue-18017.toml b/changelog/6.0.0-rc.1/issue-18017.toml new file mode 100644 index 000000000000..10c3e4c65839 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18017.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Reverts fix for #16029 due to regression in GeoIP lookup table access." + +issues = ["18017"] +pulls = ["18322"] diff --git a/changelog/6.0.0-rc.1/issue-18032.toml b/changelog/6.0.0-rc.1/issue-18032.toml new file mode 100644 index 000000000000..e7d0b8d5aee6 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18032.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added 'field_value_suggestion_mode' config parameter, that allows to switch field value suggestions off, turn them on and turn them on only for textual fields." 
+ +issues = ["18032"] +pulls = ["18095"] diff --git a/changelog/6.0.0-rc.1/issue-18049.toml b/changelog/6.0.0-rc.1/issue-18049.toml new file mode 100644 index 000000000000..3b19ec0d8133 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18049.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix index set defaults not being reflected in the form without reload." + +issues = ["18049"] +pulls = ["18098"] diff --git a/changelog/6.0.0-rc.1/issue-18053.toml b/changelog/6.0.0-rc.1/issue-18053.toml new file mode 100644 index 000000000000..bb99fde4cc81 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18053.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing problem with query input which can make the search bar disappear." + +issues = ["18053"] +pulls = ["18470"] diff --git a/changelog/6.0.0-rc.1/issue-18127.toml b/changelog/6.0.0-rc.1/issue-18127.toml new file mode 100644 index 000000000000..2c26f858e594 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18127.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Suppress index_not_found_exception when there is a race between querying and deleting an index." + +issues = ["18127"] +pulls = ["18146"] diff --git a/changelog/6.0.0-rc.1/issue-18411.toml b/changelog/6.0.0-rc.1/issue-18411.toml new file mode 100644 index 000000000000..4ccdbb9da3e8 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18411.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix CertificatesProvisioning no support for remote reindex migration" + +issues = ["18411"] +pulls = ["18410"] diff --git a/changelog/6.0.0-rc.1/issue-18633.toml b/changelog/6.0.0-rc.1/issue-18633.toml new file mode 100644 index 000000000000..eb453b46c9db --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18633.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed automatic pop-up triggering for clearing notifications on the events definition overview page." + +issues = ["18633"] +pulls = ["18646"] diff --git a/changelog/6.0.0-rc.1/issue-18718.toml b/changelog/6.0.0-rc.1/issue-18718.toml new file mode 100644 index 000000000000..42afebf2912d --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-18718.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Pipeline rule editor: Prevent Abandon Page Confirmation." + +issues = ["18718"] +pulls = ["18845"] diff --git a/changelog/6.0.0-rc.1/issue-4333.toml b/changelog/6.0.0-rc.1/issue-4333.toml new file mode 100644 index 000000000000..edd4f30e0ec6 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-4333.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = "Error page was shown when navigating to search page with permissions to some streams (usually non-mesage streams, like events), but no permission for default stream. It has been fixed." + +issues = ["Graylog2/graylog-plugin-enterprise#4333"] +pulls = ["17548","Graylog2/graylog-plugin-enterprise#6245"] + + diff --git a/changelog/6.0.0-rc.1/issue-5620.toml b/changelog/6.0.0-rc.1/issue-5620.toml new file mode 100644 index 000000000000..a6c07b50fb73 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-5620.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Shorten time range of show received messages buttons on inputs, forwarder profiles and sidecars." + +issues = ["5620"] +pulls = ["5621"] diff --git a/changelog/6.0.0-rc.1/issue-6032.toml b/changelog/6.0.0-rc.1/issue-6032.toml new file mode 100644 index 000000000000..c5c05e1d72c6 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-6032.toml @@ -0,0 +1,13 @@ +type = "added" +message = "Added 4 pipeline functions for general use that were previously only available to Illuminate." 
+ +issues = ["Graylog2/graylog-plugin-enterprise#6032"] +pulls = ["6112"] + +details.user = """ +The following pipeline functions were added: +- array_contains +- array_remove +- string_array_add +- lookup_all +""" diff --git a/changelog/6.0.0-rc.1/issue-7006.toml b/changelog/6.0.0-rc.1/issue-7006.toml new file mode 100644 index 000000000000..769004e59b51 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-7006.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Flag an error in rule source editor when variable name is a reserved word." + +issues = ["7006"] +pulls = ["17736"] diff --git a/changelog/6.0.0-rc.1/issue-7386.toml b/changelog/6.0.0-rc.1/issue-7386.toml new file mode 100644 index 000000000000..a28eae02846a --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-7386.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add search result statistics on the main search screen and execution info popover to each dashboard widget" + +pulls = ["16130"] +issues=["7386"] diff --git a/changelog/6.0.0-rc.1/issue-7629.toml b/changelog/6.0.0-rc.1/issue-7629.toml new file mode 100644 index 000000000000..007ef04333a8 --- /dev/null +++ b/changelog/6.0.0-rc.1/issue-7629.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add Create new dashboard widget action" + +issues = ["7629"] +pulls = ["17582"] diff --git a/changelog/6.0.0-rc.1/pr-16743.toml b/changelog/6.0.0-rc.1/pr-16743.toml new file mode 100644 index 000000000000..830e6a15c090 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-16743.toml @@ -0,0 +1,12 @@ +type = "c" +message = "Update io.prometheous:simpleclient to version 0.16.0." + +pulls = ["16743"] + +details.user = """ +This update contains a breaking change to the `jvm_classes_loaded` metric. +Prometheus queries referencing `jvm_classes_loaded` need to be adapted to +the new name `jvm_classes_currently_loaded`. + +See https://github.com/prometheus/client_java/pull/681. +""" diff --git a/changelog/6.0.0-rc.1/pr-16747.toml b/changelog/6.0.0-rc.1/pr-16747.toml new file mode 100644 index 000000000000..d16cf2733168 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-16747.toml @@ -0,0 +1,9 @@ +type = "changed" +message = "Update com.floreysoft:jmte from 5.0.0 to 7.0.2" + +pulls = ["16747"] + +details.user = """ +The handling of strings in 'if' commands in JMTE templates has changed, please consult the Upgrading Notes for details and instructions on how to change your custom templates, if necessary. +""" + diff --git a/changelog/6.0.0-rc.1/pr-16758.toml b/changelog/6.0.0-rc.1/pr-16758.toml new file mode 100644 index 000000000000..6e5b727982b9 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-16758.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added customizable HTTP notification that supports multiple methods and content types." + +issues = ["graylog-plugin-enterprise#3879", "graylog-plugin-enterprise#5577"] +pulls = ["16758"] diff --git a/changelog/6.0.0-rc.1/pr-17025.toml b/changelog/6.0.0-rc.1/pr-17025.toml new file mode 100644 index 000000000000..238460c86603 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17025.toml @@ -0,0 +1,4 @@ +type = "f" +message = "DataNode: if no node name was specified, use the hostname as node name default." + +pulls = ["17025"] diff --git a/changelog/6.0.0-rc.1/pr-17029.toml b/changelog/6.0.0-rc.1/pr-17029.toml new file mode 100644 index 000000000000..4e8049568862 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17029.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implement option for graylog.conf to define shortest possible search auto-refresh interval." 
+ +issues = ["16972", "17011", "Graylog2/graylog-plugin-enterprise#5138"] +pulls = ["17029"] diff --git a/changelog/6.0.0-rc.1/pr-17042.toml b/changelog/6.0.0-rc.1/pr-17042.toml new file mode 100644 index 000000000000..9f547d01e011 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17042.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Including basic data node usage information in telemetry." + +pulls = ["17042"] + diff --git a/changelog/6.0.0-rc.1/pr-17059.toml b/changelog/6.0.0-rc.1/pr-17059.toml new file mode 100644 index 000000000000..81cbc644bdcf --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17059.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixed opensearch configuration sync permissions" + +pulls = ["17059"] diff --git a/changelog/6.0.0-rc.1/pr-17070.toml b/changelog/6.0.0-rc.1/pr-17070.toml new file mode 100644 index 000000000000..91ebac42a8e4 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17070.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add index set field type management page" + +issues = ["17160"] +pulls = ["17070"] diff --git a/changelog/6.0.0-rc.1/pr-17073.toml b/changelog/6.0.0-rc.1/pr-17073.toml new file mode 100644 index 000000000000..cdcd46cf40f6 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17073.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = "Fix Content pack install modal having two superposed modal element" + +issues = ["16834", "15752"] +pulls = ["17073"] + + diff --git a/changelog/6.0.0-rc.1/pr-17087.toml b/changelog/6.0.0-rc.1/pr-17087.toml new file mode 100644 index 000000000000..417178003ee8 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17087.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Exclude non-message streams from aggregation event searches." + +issues = ["Graylog2/graylog-plugin-enterprise#6042"] +pulls = ["17087"] diff --git a/changelog/6.0.0-rc.1/pr-17143.toml b/changelog/6.0.0-rc.1/pr-17143.toml new file mode 100644 index 000000000000..ddeab6685f2d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17143.toml @@ -0,0 +1,4 @@ +type = "a" +message = "data node cluster name is configurable" + +pulls = ["17143"] diff --git a/changelog/6.0.0-rc.1/pr-17218.toml b/changelog/6.0.0-rc.1/pr-17218.toml new file mode 100644 index 000000000000..41754ff16d3c --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17218.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Show ignore_null configuration on lookup cache form." + +issues = ["15200"] +pulls = ["17218"] + diff --git a/changelog/6.0.0-rc.1/pr-17233.toml b/changelog/6.0.0-rc.1/pr-17233.toml new file mode 100644 index 000000000000..d09467c4e48e --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17233.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix redirect after creating contentpack revision." + +issues = ["17071"] +pulls = ["17233"] + diff --git a/changelog/6.0.0-rc.1/pr-17235.toml b/changelog/6.0.0-rc.1/pr-17235.toml new file mode 100644 index 000000000000..f0f217157cd4 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17235.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added support for POST and PUT requests for 'JSON path value from HTTP API' Input." 
+ +issues = ["Graylog2/graylog-plugin-enterprise#6040"] +pulls = ["17235"] diff --git a/changelog/6.0.0-rc.1/pr-17248.toml b/changelog/6.0.0-rc.1/pr-17248.toml new file mode 100644 index 000000000000..f1eba925c4ad --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17248.toml @@ -0,0 +1,5 @@ +type = "a" +message = "data node life cycle management: removal of nodes" + +issues = ["16820"] +pulls = ["17248"] diff --git a/changelog/6.0.0-rc.1/pr-17255.toml b/changelog/6.0.0-rc.1/pr-17255.toml new file mode 100644 index 000000000000..f1f8237e4b66 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17255.toml @@ -0,0 +1,14 @@ +type = "c" +message = "Datanode directries don't use nodeid subdirs anymore" + +pulls = ["17255"] + +details.user = """ +This update changes handling of datanode uses directories for configuration, data and logs. Originally the configuration +paths opensearch_config_location, opensearch_data_location and opensearch_logs_location created subdirectories named +after the Node-ID of this datanode. These subdirectories then stored the actual data. From now on the datanode stores its +data directly in directories set in the configuration properties. + +This change is backwards compatible and will still use the Node-ID subdirectory if present, together with logging a warning +recommending a configuration change. +""" diff --git a/changelog/6.0.0-rc.1/pr-17280.toml b/changelog/6.0.0-rc.1/pr-17280.toml new file mode 100644 index 000000000000..77b7924aaec6 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17280.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Sidecar collector configuration form submit button not being clickable." + +issues = ["17185"] +pulls = ["17280"] diff --git a/changelog/6.0.0-rc.1/pr-17289.toml b/changelog/6.0.0-rc.1/pr-17289.toml new file mode 100644 index 000000000000..4fd07117bba5 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17289.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix inputs extractors list not updating after deletion" + +issues = ["16858"] +pulls = ["17289"] diff --git a/changelog/6.0.0-rc.1/pr-17309.toml b/changelog/6.0.0-rc.1/pr-17309.toml new file mode 100644 index 000000000000..554d0033af26 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17309.toml @@ -0,0 +1,10 @@ +type = "r" +message = "Remove web interface plugin `systemnavigation`." + +pulls = ["17309"] + +details.user = """ +Before this change it was possible to register options for the system dropdown in the navigation, by using the `systemnavigation` plugin. +Now this can be achieved by registering a `navigation` plugin. The plugin entity needs the `description` `System` and `children`. +Each children represents a dropdown option and needs a `path` and `description` attribute. +""" diff --git a/changelog/6.0.0-rc.1/pr-17348.toml b/changelog/6.0.0-rc.1/pr-17348.toml new file mode 100644 index 000000000000..7b3a73ed2dce --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17348.toml @@ -0,0 +1,8 @@ +type = "a" +message = """ +Adding Data Tiering functionality to Graylog Open providing users with a more intuitive and flexible +approach to configuring index rotation, retention, and data tiering. 
+""" + +issues = ["Graylog2/graylog-plugin-enterprise#6023"] +pulls = ["17348", "Graylog2/graylog-plugin-enterprise#6091"] diff --git a/changelog/6.0.0-rc.1/pr-17362.toml b/changelog/6.0.0-rc.1/pr-17362.toml new file mode 100644 index 000000000000..8fbfa56417ec --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17362.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Prevent adding same entity data table filter multiple times" + +pulls = ["17362"] diff --git a/changelog/6.0.0-rc.1/pr-17402.toml b/changelog/6.0.0-rc.1/pr-17402.toml new file mode 100644 index 000000000000..60dfbc0489c4 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17402.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add start/stop lifecycle functions to data node. Add validations for triggering data node lifecycle functions." + +issues = ["17383"] +pulls = ["17402"] diff --git a/changelog/6.0.0-rc.1/pr-17407.toml b/changelog/6.0.0-rc.1/pr-17407.toml new file mode 100644 index 000000000000..f8f8b491e648 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17407.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix allow displayName in systemConfiguration plugin export." + +issues = ["15939"] +pulls = ["17407"] diff --git a/changelog/6.0.0-rc.1/pr-17435.toml b/changelog/6.0.0-rc.1/pr-17435.toml new file mode 100644 index 000000000000..f5702bb9da43 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17435.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix editing of system notification event defitinions." + +issues = [""] +pulls = ["17435"] diff --git a/changelog/6.0.0-rc.1/pr-17449.toml b/changelog/6.0.0-rc.1/pr-17449.toml new file mode 100644 index 000000000000..ea21912853af --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17449.toml @@ -0,0 +1,10 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Fix archiving when batch size exceeds ES/OS http.max_content_length." + +issues = ["Graylog2/graylog-plugin-enterprise#3318"] +pulls = ["17449"] + diff --git a/changelog/6.0.0-rc.1/pr-17456.toml b/changelog/6.0.0-rc.1/pr-17456.toml new file mode 100644 index 000000000000..ad5013f7d533 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17456.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Show correct throughput metric when messages are routed via pipeline rules." + +issues = ["Graylog2/graylog-plugin-enterprise#6138"] +pulls = ["17456"] diff --git a/changelog/6.0.0-rc.1/pr-17477.toml b/changelog/6.0.0-rc.1/pr-17477.toml new file mode 100644 index 000000000000..065e2ee98ef9 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17477.toml @@ -0,0 +1,8 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Fix excessive logging of warnings for metrics requests after session expiration." + +pulls = ["17477"] diff --git a/changelog/6.0.0-rc.1/pr-17498.toml b/changelog/6.0.0-rc.1/pr-17498.toml new file mode 100644 index 000000000000..3de04e866420 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17498.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Added support for looking up Email Notification sender, reply to, and recipient emails in lookup tables." 
+ +pulls = ["17498"] diff --git a/changelog/6.0.0-rc.1/pr-17552.toml b/changelog/6.0.0-rc.1/pr-17552.toml new file mode 100644 index 000000000000..4214318bb14d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17552.toml @@ -0,0 +1,12 @@ +type = "f" +message = "By default, `none` and `close` index retention strategies are disabled for new installations." + +issues = ["graylog-plugin-enterprise#5888"] +pulls = ["17552"] + +details.user = """ +Configuration setting `disabled_retention_strategies` disables the specified retention strategies. +This is introduced in order to deprecate obsolete strategies in new installations. Strategies can be re-enabled +simply by removing from this list.
+**Do not extend this list on existing installs!** +""" diff --git a/changelog/6.0.0-rc.1/pr-17585.toml b/changelog/6.0.0-rc.1/pr-17585.toml new file mode 100644 index 000000000000..2170d645da9a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17585.toml @@ -0,0 +1,5 @@ +type = "r" +message = "Removed deprecated methods in org.graylog2.plugin.Message class : addStringFields, addLongFields, addDoubleFields, getValidationErrors" + +issues = [""] +pulls = ["17585"] diff --git a/changelog/6.0.0-rc.1/pr-17601.toml b/changelog/6.0.0-rc.1/pr-17601.toml new file mode 100644 index 000000000000..831b5782c1df --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17601.toml @@ -0,0 +1,7 @@ +type = "c" +message = "Changed logging in Message class. When invalid message key is found and ignored, that fact is logged with INFO level. Rate limited log is used in order to not overwhelm logs with this kind of log messages." + +issues = [""] +pulls = ["17601"] + + diff --git a/changelog/6.0.0-rc.1/pr-17611.toml b/changelog/6.0.0-rc.1/pr-17611.toml new file mode 100644 index 000000000000..6f05118588a3 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17611.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix issue preventing the array_contains pipeline function from working with json arrays" + +pulls = ["17611"] diff --git a/changelog/6.0.0-rc.1/pr-17618.toml b/changelog/6.0.0-rc.1/pr-17618.toml new file mode 100644 index 000000000000..9473d10755e8 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17618.toml @@ -0,0 +1,7 @@ +type = "r" +message = "Removed unused classes: org.graylog2.plugin.SingletonMessages and org.graylog.plugins.views.search.engine.LuceneQueryParsingException" + +issues = [""] +pulls = ["17618"] + + diff --git a/changelog/6.0.0-rc.1/pr-17642.toml b/changelog/6.0.0-rc.1/pr-17642.toml new file mode 100644 index 000000000000..c2c83bd86f1d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17642.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix distribution of table column width for print version of data table and message list widget" + +issues = ["Graylog2/graylog-plugin-enterprise#6158"] +pulls = ["17642"] diff --git a/changelog/6.0.0-rc.1/pr-17656.toml b/changelog/6.0.0-rc.1/pr-17656.toml new file mode 100644 index 000000000000..f3f3877a4d94 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17656.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Remove entity from bulk select state, when removing single entity in tables with bulk actions." + +pulls = ["17656"] diff --git a/changelog/6.0.0-rc.1/pr-17678.toml b/changelog/6.0.0-rc.1/pr-17678.toml new file mode 100644 index 000000000000..e10012d5a5ff --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17678.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Fixing text alignment of highlighted numbers in data tables." + +pulls = ["17678"] + + diff --git a/changelog/6.0.0-rc.1/pr-17693.toml b/changelog/6.0.0-rc.1/pr-17693.toml new file mode 100644 index 000000000000..7dd138e1fb81 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17693.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Unify the way numeric fields are displayed in the message table widget, compared with the data table widget." 
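For illustration of the `disabled_retention_strategies` setting described in pr-17552 above, a new installation's graylog.conf entry might look like the following; the comma-separated list format is assumed for this sketch.

```
# Retention strategies that cannot be selected or used on this installation
disabled_retention_strategies = none,close
```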
+ +pulls = ["17693"] diff --git a/changelog/6.0.0-rc.1/pr-17707.toml b/changelog/6.0.0-rc.1/pr-17707.toml new file mode 100644 index 000000000000..084d34aa5813 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17707.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Fallback guide on how to do manual migration into the DataNode" + +pulls = ["17707"] diff --git a/changelog/6.0.0-rc.1/pr-17758.toml b/changelog/6.0.0-rc.1/pr-17758.toml new file mode 100644 index 000000000000..f4e9449972bd --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17758.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Wait for Datanode to become available during graylog server startup" + +pulls = ["17758"] diff --git a/changelog/6.0.0-rc.1/pr-17765.toml b/changelog/6.0.0-rc.1/pr-17765.toml new file mode 100644 index 000000000000..c8ce5f29debd --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17765.toml @@ -0,0 +1,9 @@ +type = "changed" +message = "Transition from javax to jakarta namespace." + +pulls = ["17765","Graylog2/graylog-plugin-enterprise#6347"] + +details.user = """ +This change is mostly relevant for plugin developers. Please consult the upgrade notes for +further information. +""" diff --git a/changelog/6.0.0-rc.1/pr-17775.toml b/changelog/6.0.0-rc.1/pr-17775.toml new file mode 100644 index 000000000000..ba3a190b6951 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17775.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding a resource to manage the CA to regular Graylog (not only during preflight)" + +pulls = ["17775"] diff --git a/changelog/6.0.0-rc.1/pr-17805.toml b/changelog/6.0.0-rc.1/pr-17805.toml new file mode 100644 index 000000000000..b94ac8ea066b --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17805.toml @@ -0,0 +1,5 @@ +type = "c" + message = "Adds a new optional parameter `exclude_empty_arrays` for pipeline function `select_jsonpath`." + +pulls = ["18674"] +issues = ["17805"] diff --git a/changelog/6.0.0-rc.1/pr-17820.toml b/changelog/6.0.0-rc.1/pr-17820.toml new file mode 100644 index 000000000000..b17c9158e921 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17820.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix issue preventing the lookup_all pipeline function from working with json arrays" + +issues = ["graylog-plugin-enterprise#6363"] +pulls = ["17820"] diff --git a/changelog/6.0.0-rc.1/pr-17880.toml b/changelog/6.0.0-rc.1/pr-17880.toml new file mode 100644 index 000000000000..910486036910 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17880.toml @@ -0,0 +1,13 @@ +type = "a" +message = "Add a `job_scheduler_concurrency_limits` configuration option to limit the parallel execution of job types." + +issues = ["graylog-plugin-enterprise#6313"] +pulls = ["graylog-plugin-enterprise#6399", "#17880"] + +details.user = """ +Adds the ability to configure cluster-wide max concurrency for specified job types. No more than this number of +instances of the job type will be executed in parallel (across the entire cluster). + +Example setting: +job_scheduler_concurrency_limits = event-processor-execution-v1:2,notification-execution-v1:2 +""" diff --git a/changelog/6.0.0-rc.1/pr-17909.toml b/changelog/6.0.0-rc.1/pr-17909.toml new file mode 100644 index 000000000000..953374a72cb6 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17909.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fixed bug causing null response from `array_contains` pipeline function when passed null array." 
+ +pulls = ["17909"] diff --git a/changelog/6.0.0-rc.1/pr-17956.toml b/changelog/6.0.0-rc.1/pr-17956.toml new file mode 100644 index 000000000000..48b7390bc353 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17956.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add profile selection to index set edit/add form and index set field types page" + +pulls = ["17956"] +issues=["17751", "17750"] diff --git a/changelog/6.0.0-rc.1/pr-17965.toml b/changelog/6.0.0-rc.1/pr-17965.toml new file mode 100644 index 000000000000..96a0ef6692e4 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-17965.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Adding `hooks.logout` hook which is called when the user logs out" + +pulls = ["17965"] diff --git a/changelog/6.0.0-rc.1/pr-18001.toml b/changelog/6.0.0-rc.1/pr-18001.toml new file mode 100644 index 000000000000..3730a58526ee --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18001.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Added state map and corresponding rest resources for datanode migration wizard" + +pulls = ["18001"] diff --git a/changelog/6.0.0-rc.1/pr-18016.toml b/changelog/6.0.0-rc.1/pr-18016.toml new file mode 100644 index 000000000000..00e95f79ab3e --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18016.toml @@ -0,0 +1,11 @@ +type = "changed" +message = "Remove dependency to Mongojack 2" + +pulls = ["18016"] + +details.user = """ +The Java dependency on the Mongojack 2 library was removed and replaced with a +compatibility layer. Plugins that interact with MongoDB might need to be +modified if they use Mongojack functionality that is not commonly used +throughout the Graylog core code base. +""" diff --git a/changelog/6.0.0-rc.1/pr-18022.toml b/changelog/6.0.0-rc.1/pr-18022.toml new file mode 100644 index 000000000000..b45a84bd968a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18022.toml @@ -0,0 +1,5 @@ +type = "C" +message = "Trigger system notification for IO exceptions during HTTP data adapater lookup." + +pulls = ["18022"] +issues=["Graylog2/graylog-plugin-enterprise#6292"] diff --git a/changelog/6.0.0-rc.1/pr-18028.toml b/changelog/6.0.0-rc.1/pr-18028.toml new file mode 100644 index 000000000000..c6df6536df17 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18028.toml @@ -0,0 +1,11 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Fix LDAP / AD authentication with legacy TLS." + +issues = [""] +pulls = ["18028"] + +contributors = [""] diff --git a/changelog/6.0.0-rc.1/pr-18067.toml b/changelog/6.0.0-rc.1/pr-18067.toml new file mode 100644 index 000000000000..2309d988ca4a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18067.toml @@ -0,0 +1,15 @@ +type = "a" +message = "Extend search query input with option to view search query history." + +issues = ["Graylog2/graylog-plugin-enterprise#4012"] +pulls = ["18067"] + +details.user = """ +The history is scoped by user and sorted chronologically. The history contains all queries executed on the search page, dashboards and dashboard widgets. +It can be opened by clicking on the history icon or by pressing the keyboard shortcut alt+shift+h. + +While it is open, it can be filtered using the query input. When a previous query has been selected, it will replace the current value of the query input. + +With this change we are also removing the keyboard shortcuts ctrl+space and ctrl+shift+space to manually display the suggestions. 
+It is still possible to display the suggestions by pressing alt+space. +""" diff --git a/changelog/6.0.0-rc.1/pr-18079.toml b/changelog/6.0.0-rc.1/pr-18079.toml new file mode 100644 index 000000000000..99aec966f593 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18079.toml @@ -0,0 +1,6 @@ +type = "added" +message = "add datanode migration ui" + +issues = [""] +pulls = ["18079"] + diff --git a/changelog/6.0.0-rc.1/pr-18091.toml b/changelog/6.0.0-rc.1/pr-18091.toml new file mode 100644 index 000000000000..2dd94b5fd2a7 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18091.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fix Session not timing out on index set details page" + +issues = ["18063"] +pulls = ["18091"] + diff --git a/changelog/6.0.0-rc.1/pr-18113.toml b/changelog/6.0.0-rc.1/pr-18113.toml new file mode 100644 index 000000000000..bb883297bd46 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18113.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added link to first start logs to log into preflight UI" + +pulls = ["18113"] + diff --git a/changelog/6.0.0-rc.1/pr-18121.toml b/changelog/6.0.0-rc.1/pr-18121.toml new file mode 100644 index 000000000000..5535f41c39e9 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18121.toml @@ -0,0 +1,6 @@ +type = "a" +message = "add bulk management capabilities for data nodes" + +issues = ["17732"] +pulls = ["18121"] + diff --git a/changelog/6.0.0-rc.1/pr-18124.toml b/changelog/6.0.0-rc.1/pr-18124.toml new file mode 100644 index 000000000000..e107508a3d58 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18124.toml @@ -0,0 +1,10 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! + +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "fixed" +message = "Fix support for changed IPinfo ASN mmdb files." + +issues = ["Graylog2/graylog-plugin-enterprise#6436"] +pulls = ["18124"] + diff --git a/changelog/6.0.0-rc.1/pr-18165.toml b/changelog/6.0.0-rc.1/pr-18165.toml new file mode 100644 index 000000000000..5254c051e097 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18165.toml @@ -0,0 +1,4 @@ +type = "security" +message = "Restrict classes allowed for cluster config and event types. [GHSA-p6gg-5hf4-4rgj](https://github.com/Graylog2/graylog2-server/security/advisories/GHSA-p6gg-5hf4-4rgj)" + +pulls = ["18165"] diff --git a/changelog/6.0.0-rc.1/pr-18171.toml b/changelog/6.0.0-rc.1/pr-18171.toml new file mode 100644 index 000000000000..74dd92ad5a13 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18171.toml @@ -0,0 +1,6 @@ +type = "f" +message = "Fix unintended HTML-escaping in system notification messages." + +issues = ["Graylog2/graylog-plugin-enterprise#6525"] +pulls = ["18171"] + diff --git a/changelog/6.0.0-rc.1/pr-18174.toml b/changelog/6.0.0-rc.1/pr-18174.toml new file mode 100644 index 000000000000..7d8454d1afea --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18174.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "More restrictive file permissions for support bundle files." + +pulls = ["18174"] diff --git a/changelog/6.0.0-rc.1/pr-18177.toml b/changelog/6.0.0-rc.1/pr-18177.toml new file mode 100644 index 000000000000..24b41a6e57e0 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18177.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Add permission check for displaying content pack uninstall details." 
+ +pulls = ["18177"] diff --git a/changelog/6.0.0-rc.1/pr-18219.toml b/changelog/6.0.0-rc.1/pr-18219.toml new file mode 100644 index 000000000000..68e469ad0a6a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18219.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Avoid logging Graylog node ID multiple times during server startup." + +pulls = ["18219"] diff --git a/changelog/6.0.0-rc.1/pr-18241.toml b/changelog/6.0.0-rc.1/pr-18241.toml new file mode 100644 index 000000000000..d063a74009aa --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18241.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Do not display info for dashboard query filter in aggregation builder, when dashboard query is empty" + +pulls = ["18241"] diff --git a/changelog/6.0.0-rc.1/pr-18328.toml b/changelog/6.0.0-rc.1/pr-18328.toml new file mode 100644 index 000000000000..d6e7eadad021 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18328.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Require authentication for visiting API browser to limit information exposure." + +pulls = ["18328"] diff --git a/changelog/6.0.0-rc.1/pr-18342.toml b/changelog/6.0.0-rc.1/pr-18342.toml new file mode 100644 index 000000000000..f4d61fc79313 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18342.toml @@ -0,0 +1,4 @@ +type = "f" +message = "OpenSearch renamed cluster.initial_master_nodes to initial_cluster_manager_nodes, which was not reflected yet and is a possible cause of startup bugs." + +pulls = ["18342"] diff --git a/changelog/6.0.0-rc.1/pr-18343.toml b/changelog/6.0.0-rc.1/pr-18343.toml new file mode 100644 index 000000000000..ca8d1c914ee4 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18343.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Improve keyword validation in time range picker." + +pulls = ["18219"] diff --git a/changelog/6.0.0-rc.1/pr-18354.toml b/changelog/6.0.0-rc.1/pr-18354.toml new file mode 100644 index 000000000000..461f76ff3fb4 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18354.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Add a sensible default value for OpenSearch inside the DataNode for indices.query.bool.max_clause_count" + +pulls = ["18354"] diff --git a/changelog/6.0.0-rc.1/pr-18357.toml b/changelog/6.0.0-rc.1/pr-18357.toml new file mode 100644 index 000000000000..3643d9c7d150 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18357.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add support for searchable snapshots in data node" + +pulls = ["18357"] diff --git a/changelog/6.0.0-rc.1/pr-18369.toml b/changelog/6.0.0-rc.1/pr-18369.toml new file mode 100644 index 000000000000..c583a29ff55e --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18369.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Propagate path.repo configuration in datanode. 
Needed to support fs snapshot repositories" + +pulls = ["18369"] diff --git a/changelog/6.0.0-rc.1/pr-18372.toml b/changelog/6.0.0-rc.1/pr-18372.toml new file mode 100644 index 000000000000..357a45ff17a2 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18372.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Allow adding aggregations to event processor for use with EventModifiers" + +pulls = ["18372"] + diff --git a/changelog/6.0.0-rc.1/pr-18386.toml b/changelog/6.0.0-rc.1/pr-18386.toml new file mode 100644 index 000000000000..43b9fe517754 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18386.toml @@ -0,0 +1,4 @@ +type = "a" +message = "bumping OpenSearch inside the DataNode to 2.12.0" + +pulls = ["18386"] diff --git a/changelog/6.0.0-rc.1/pr-18430.toml b/changelog/6.0.0-rc.1/pr-18430.toml new file mode 100644 index 000000000000..7c68c2e0245d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18430.toml @@ -0,0 +1,8 @@ +type = "changed" +message = "Update Apache Shiro to 2.0.0" + +pulls = ["18430"] + +details.user = """ +The Apache Shiro authentication and authorization library has been updated to the latest release. +""" diff --git a/changelog/6.0.0-rc.1/pr-18465.toml b/changelog/6.0.0-rc.1/pr-18465.toml new file mode 100644 index 000000000000..3130d1857b05 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18465.toml @@ -0,0 +1,4 @@ +type = "f" +message = "bumping the OpenSearch client to 2.12.0" + +pulls = ["18386"] diff --git a/changelog/6.0.0-rc.1/pr-18472.toml b/changelog/6.0.0-rc.1/pr-18472.toml new file mode 100644 index 000000000000..f861bdc4b787 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18472.toml @@ -0,0 +1,4 @@ +type = "changed" +message = "Consider `http_external_uri`when setting the cookie path." + +pulls = ["18472"] diff --git a/changelog/6.0.0-rc.1/pr-18492.toml b/changelog/6.0.0-rc.1/pr-18492.toml new file mode 100644 index 000000000000..6a1757a637df --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18492.toml @@ -0,0 +1,4 @@ +type = "a" +message = "show Journal Size in In-Place Migration steps" + +pulls = ["18492"] diff --git a/changelog/6.0.0-rc.1/pr-18522.toml b/changelog/6.0.0-rc.1/pr-18522.toml new file mode 100644 index 000000000000..a055473cc32d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18522.toml @@ -0,0 +1,4 @@ +type = "a" +message = "create client certificates for the DataNode for 3rd party apps" + +pulls = ["18522"] diff --git a/changelog/6.0.0-rc.1/pr-18527.toml b/changelog/6.0.0-rc.1/pr-18527.toml new file mode 100644 index 000000000000..5fb2da8d4a0d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18527.toml @@ -0,0 +1,6 @@ +type = "c" +message = "Change default second sort order to gl2_second_sort_field." + +issues = ["18348"] +pulls = ["18527"] + diff --git a/changelog/6.0.0-rc.1/pr-18535.toml b/changelog/6.0.0-rc.1/pr-18535.toml new file mode 100644 index 000000000000..f721cdee2ba3 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18535.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Introduce `MessageFactory` and `ResultMessageFactory` Java interfaces and change `Message` constructors to be package-private. 
(See upgrading notes for details!)" + +pulls = ["18535", "Graylog2/graylog-plugin-enterprise#6723"] diff --git a/changelog/6.0.0-rc.1/pr-18536.toml b/changelog/6.0.0-rc.1/pr-18536.toml new file mode 100644 index 000000000000..12e444bdd5b3 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18536.toml @@ -0,0 +1,13 @@ +type = "c" +message = "Datanode opensearch proxy endpoints support targeting specific nodes" + +pulls = ["18536"] + +details.user = """ +Both proxy URLs now contain a hostname path parameter: + +* /api/datanodes/{hostname}/request/{path: .*} for Opensearch APIs +* /api/datanodes/{hostname}/rest/{path: .*} for Datanode rest APIs + +The hostname path parameter can be either a real hostname of the datanode or one of "leader" and "any" keywords. +""" diff --git a/changelog/6.0.0-rc.1/pr-18542.toml b/changelog/6.0.0-rc.1/pr-18542.toml new file mode 100644 index 000000000000..f68bd7fcf086 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18542.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix RemoteReindexRunning step showing wrong actions" + +pulls = ["18542"] diff --git a/changelog/6.0.0-rc.1/pr-18559.toml b/changelog/6.0.0-rc.1/pr-18559.toml new file mode 100644 index 000000000000..649c90557192 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18559.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix duplicate license warning in DataNodesClusterManagementPage" + +pulls = ["18559"] diff --git a/changelog/6.0.0-rc.1/pr-18564.toml b/changelog/6.0.0-rc.1/pr-18564.toml new file mode 100644 index 000000000000..b45751a8952e --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18564.toml @@ -0,0 +1,12 @@ +type = "c" +message = "Compute indices overview statistics using `_cluster/stats` to reduce the amount of data returned." + +pulls = ["18564"] +issues = ["Graylog2/graylog-plugin-enterprise#6100"] + +details.user = """ +The System/Indices overview page displays some high-level statistics. When there are a very large number +of indices and shards, this may result in a ContentTooLongException. + +Graylog now avoids this by using a different API which eliminates all the extraneous data. +""" diff --git a/changelog/6.0.0-rc.1/pr-18566.toml b/changelog/6.0.0-rc.1/pr-18566.toml new file mode 100644 index 000000000000..40efcf53f7e1 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18566.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Add UI for Data Node client certificate creation" + +issues = ["16466"] +pulls = ["18566"] diff --git a/changelog/6.0.0-rc.1/pr-18589.toml b/changelog/6.0.0-rc.1/pr-18589.toml new file mode 100644 index 000000000000..d591b7c000fd --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18589.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Improve parameter description of pipeline function `csv_to_map`" + +pulls = ["18707"] +issues = ["18589"] diff --git a/changelog/6.0.0-rc.1/pr-18609.toml b/changelog/6.0.0-rc.1/pr-18609.toml new file mode 100644 index 000000000000..d3d8e143076a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18609.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Modified default values for min and max shard size for Time-Size-Optimizing rotation strategy to better match typical node resourcing." + +pulls = ["18609"] +issues = ["Graylog2/graylog-plugin-enterprise#6611"] diff --git a/changelog/6.0.0-rc.1/pr-18621.toml b/changelog/6.0.0-rc.1/pr-18621.toml new file mode 100644 index 000000000000..921ce20ca7e5 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18621.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Add Remote reindex connection check, and display indices with errors."
+ +pulls = ["18621"] diff --git a/changelog/6.0.0-rc.1/pr-18688.toml b/changelog/6.0.0-rc.1/pr-18688.toml new file mode 100644 index 000000000000..a5c23b2bf70a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18688.toml @@ -0,0 +1,15 @@ +type = "c" +message = "Rename datanode opensearch proxy paths" + +pulls = ["18688"] + +details.user = """ +Opensearch proxy path is now containing /opensearch/ path instead of /request/: + +Before: +* /api/datanodes/{hostname}/request/{path: .*} + +After: +* /api/datanodes/{hostname}/opensearch/{path: .*} + +""" diff --git a/changelog/6.0.0-rc.1/pr-18742.toml b/changelog/6.0.0-rc.1/pr-18742.toml new file mode 100644 index 000000000000..01448ed37838 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18742.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Search query input in event definition form removing its value while typing." + +issues = ["18700"] +pulls = ["18742"] diff --git a/changelog/6.0.0-rc.1/pr-18749.toml b/changelog/6.0.0-rc.1/pr-18749.toml new file mode 100644 index 000000000000..a311cf37572d --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18749.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Add support for query params in datanode opensearch and datanode rest API proxies" + +pulls = ["18749"] + diff --git a/changelog/6.0.0-rc.1/pr-18754.toml b/changelog/6.0.0-rc.1/pr-18754.toml new file mode 100644 index 000000000000..c4f1516b9b0a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18754.toml @@ -0,0 +1,5 @@ +type = "f" +message = "improve Remote Reindex Migration Indices display UX." + +pulls = ["18754"] +issues=["Graylog2/graylog-plugin-enterprise#6844"] diff --git a/changelog/6.0.0-rc.1/pr-18769.toml b/changelog/6.0.0-rc.1/pr-18769.toml new file mode 100644 index 000000000000..d7930b2ddb67 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18769.toml @@ -0,0 +1,5 @@ +type = "c" +message = "When installing a content pack in cloud, ignore any inputs that are not cloud-compatible." + +pulls = ["18688", "Graylog2/graylog-plugin-enterprise#6854"] +issues = ["Graylog2/graylog-plugin-enterprise#5997"] diff --git a/changelog/6.0.0-rc.1/pr-18780.toml b/changelog/6.0.0-rc.1/pr-18780.toml new file mode 100644 index 000000000000..1ff173c68eac --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18780.toml @@ -0,0 +1,13 @@ +type = "a" +message = "Add 'all' keyword for datanode rest api proxy, aggregating responses from all connected datanodes" + +pulls = ["18780"] + +details.user = """ +Datanode rest proxy path is supporting 'all' keyword: + +* /api/datanodes/all/rest/{path: .*} + +The result will be a map of hostname -> response_content for each datanode. + +""" diff --git a/changelog/6.0.0-rc.1/pr-18787.toml b/changelog/6.0.0-rc.1/pr-18787.toml new file mode 100644 index 000000000000..12e3d4e2914a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18787.toml @@ -0,0 +1,6 @@ +type = "c" +message = "Report partial shard failures and fix sort order for search export." + +issues = [""] +pulls = ["18787"] + diff --git a/changelog/6.0.0-rc.1/pr-18802.toml b/changelog/6.0.0-rc.1/pr-18802.toml new file mode 100644 index 000000000000..deb9929667fd --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18802.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Remove not yet configured datanodes (empty transport address) from indexer discovery" + +pulls = ["18802"] diff --git a/changelog/6.0.0-rc.1/pr-18804.toml b/changelog/6.0.0-rc.1/pr-18804.toml new file mode 100644 index 000000000000..525dee1e017a --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18804.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix max. 
number of indices shown for Index Time Size Optimizing strategy." + +issues = ["14845"] +pulls = ["18804", "graylog-plugin-enterprise#6863"] diff --git a/changelog/6.0.0-rc.1/pr-18825.toml b/changelog/6.0.0-rc.1/pr-18825.toml new file mode 100644 index 000000000000..f525e39525a6 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18825.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Remote reindex migration in datanode now uses async opensearch tasks." + +pulls = ["18825"] +issues=["Graylog2/graylog-plugin-enterprise#6844"] diff --git a/changelog/6.0.0-rc.1/pr-18828.toml b/changelog/6.0.0-rc.1/pr-18828.toml new file mode 100644 index 000000000000..3ffec0800665 --- /dev/null +++ b/changelog/6.0.0-rc.1/pr-18828.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix clone modal immediately closing in Sidecar configuration" + +issues = ["18811"] +pulls = ["18828"] diff --git a/changelog/6.1.0-rc.1/.gitkeep b/changelog/6.1.0-rc.1/.gitkeep new file mode 100644 index 000000000000..900f0cb27b2e --- /dev/null +++ b/changelog/6.1.0-rc.1/.gitkeep @@ -0,0 +1 @@ +# Keep the directory in Git \ No newline at end of file diff --git a/changelog/6.1.0-rc.1/issue-13822.toml b/changelog/6.1.0-rc.1/issue-13822.toml new file mode 100644 index 000000000000..f08c6cd742a6 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-13822.toml @@ -0,0 +1,9 @@ +type = "fixed" +message = "Fix link target for search query error documentation links." + +issues = ["13822"] +pulls = ["20563"] + +details.user = """ +The documentation link now points to the error types section again, instead of just the search query page. +""" diff --git a/changelog/6.1.0-rc.1/issue-17730.toml b/changelog/6.1.0-rc.1/issue-17730.toml new file mode 100644 index 000000000000..2721d68e8d82 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-17730.toml @@ -0,0 +1,5 @@ +type="f" +message="Display Data Node Logs" + +issues=["17730"] +pulls=["19352"] diff --git a/changelog/6.1.0-rc.1/issue-17837.toml b/changelog/6.1.0-rc.1/issue-17837.toml new file mode 100644 index 000000000000..37580fc588cf --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-17837.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix zstd library loading issue on noexec-mounted /tmp directories." + +issues = ["17837"] +pulls = ["19971"] diff --git a/changelog/6.1.0-rc.1/issue-18288.toml b/changelog/6.1.0-rc.1/issue-18288.toml new file mode 100644 index 000000000000..d18aee376f40 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18288.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Display field value suggestions when field name is in the middle of the search query." + +issues = ["18288"] +pulls = ["18952"] diff --git a/changelog/6.1.0-rc.1/issue-18359.toml b/changelog/6.1.0-rc.1/issue-18359.toml new file mode 100644 index 000000000000..9bf87a8657b3 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18359.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Clarify rule simulation processing" + +issues = ["18359"] +pulls = ["19261"] diff --git a/changelog/6.1.0-rc.1/issue-18463.toml b/changelog/6.1.0-rc.1/issue-18463.toml new file mode 100644 index 000000000000..357bebc645da --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18463.toml @@ -0,0 +1,5 @@ +type="f" +message="Avoid double-qualifying navigation links when path prefix is configured." 
+ +issues=["18463"] +pulls=["19511"] diff --git a/changelog/6.1.0-rc.1/issue-18809.toml b/changelog/6.1.0-rc.1/issue-18809.toml new file mode 100644 index 000000000000..ce7e6e46472f --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18809.toml @@ -0,0 +1,11 @@ +type = "f" +message = "Fixing several smaller bugs with keyword time range tab." + +issues = ["18809"] +pulls = ["18879"] + +details.user = """ +Before this change the time range picker submit button could be disabled after switching between the keyword tab and other tabs without any further changes. +Just opening the keyword tab and saving the keyword time range resulted in an error. +When opening the keyword tab, with the default time range, the time range preview was not shown. +""" diff --git a/changelog/6.1.0-rc.1/issue-18876.toml b/changelog/6.1.0-rc.1/issue-18876.toml new file mode 100644 index 000000000000..d57859e34f9b --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18876.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Datanode remote reindex migration UI features" + +issues = ["18876"] +pulls = ["19100"] diff --git a/changelog/6.1.0-rc.1/issue-18883.toml b/changelog/6.1.0-rc.1/issue-18883.toml new file mode 100644 index 000000000000..fef8e4e39ad9 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18883.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Grok field extraction for patterns with converters." + +issues = ["18883"] +pulls = ["18898"] diff --git a/changelog/6.1.0-rc.1/issue-18915.toml b/changelog/6.1.0-rc.1/issue-18915.toml new file mode 100644 index 000000000000..9a64152ea986 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18915.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Reset events widget pagination when executing search." + +issues = ["18915"] +pulls = ["18945"] diff --git a/changelog/6.1.0-rc.1/issue-18932.toml b/changelog/6.1.0-rc.1/issue-18932.toml new file mode 100644 index 000000000000..93720eb528f4 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18932.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix selecting option in more results select on pipeline simulator page." + +issues=["18932"] +pulls=["19413"] diff --git a/changelog/6.1.0-rc.1/issue-18949.toml b/changelog/6.1.0-rc.1/issue-18949.toml new file mode 100644 index 000000000000..faf3979dbe6e --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18949.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix permissions handling in events widget." + +issues = ["18949"] +pulls = ["18953"] diff --git a/changelog/6.1.0-rc.1/issue-18971.toml b/changelog/6.1.0-rc.1/issue-18971.toml new file mode 100644 index 000000000000..6ac9ffb59b48 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18971.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixing stream filtering in `events` search type." + +issues = ["18971"] +pulls = ["18972"] diff --git a/changelog/6.1.0-rc.1/issue-18975.toml b/changelog/6.1.0-rc.1/issue-18975.toml new file mode 100644 index 000000000000..bed3383a589f --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-18975.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing text color for unqualified fields in field select." + +issues = ["18975"] +pulls = ["18976"] diff --git a/changelog/6.1.0-rc.1/issue-19003.toml b/changelog/6.1.0-rc.1/issue-19003.toml new file mode 100644 index 000000000000..031815d509b8 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19003.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix log4j memory appender buffer size configuration." 
+ +issues=["19003"] +pulls=["19208"] diff --git a/changelog/6.1.0-rc.1/issue-19007.toml b/changelog/6.1.0-rc.1/issue-19007.toml new file mode 100644 index 000000000000..f04ec6cdb822 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19007.toml @@ -0,0 +1,5 @@ +type="f" +message="Fixed user permission-based filtering on Event Definitions." + +issues=["19007"] +pulls=["19262"] diff --git a/changelog/6.1.0-rc.1/issue-19009.toml b/changelog/6.1.0-rc.1/issue-19009.toml new file mode 100644 index 000000000000..edfca2ede3fa --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19009.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Alerts & events search input is removing input values while typing." + +issues = ["19009"] +pulls = ["20516"] diff --git a/changelog/6.1.0-rc.1/issue-19012.toml b/changelog/6.1.0-rc.1/issue-19012.toml new file mode 100644 index 000000000000..00cd844e8119 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19012.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing 'unknown format' error on sidecar overview page." + +issues = ["19012"] +pulls = ["19128"] diff --git a/changelog/6.1.0-rc.1/issue-19014.toml b/changelog/6.1.0-rc.1/issue-19014.toml new file mode 100644 index 000000000000..d8c79984c531 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19014.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Do not show confirm dialog when saving event notification." + +issues = ["19014"] +pulls = ["19073"] diff --git a/changelog/6.1.0-rc.1/issue-19092.toml b/changelog/6.1.0-rc.1/issue-19092.toml new file mode 100644 index 000000000000..d5d35cd4f521 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19092.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix display tooltip for help icons in disabled menu items." + +issues=["19092"] +pulls=["19093"] diff --git a/changelog/6.1.0-rc.1/issue-19098.toml b/changelog/6.1.0-rc.1/issue-19098.toml new file mode 100644 index 000000000000..70b2dbe6ace6 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19098.toml @@ -0,0 +1,16 @@ +type="a" +message="Introduce new pipeline functions `remove_single_field` and `remove_multiple_fields` to (eventually) replace `remove_field`." + +details.user=""" +GL 5.1 added regex-matching to the pipeline function `remove_field`. This breaks existing pipeline rules that call +`remove_field` with a field name containing a regex reserved character, notably `.`. Performance of existing rules +may also be degraded. +Both issues are addressed by introducing alternate, more specific functions: +`remove_single_field` removes just a single field specified by name. It is simple and fast. +`remove_multiple_fields` removes fields matching a regex pattern and/or list of names. Depending on the +complexity of the matching it is slower. +'remove_field' will be deprecated and removed in the next major version. Do not use it. +""" + +issues=["19098"] +pulls=["19268"] diff --git a/changelog/6.1.0-rc.1/issue-19139.toml b/changelog/6.1.0-rc.1/issue-19139.toml new file mode 100644 index 000000000000..fe3021ecf3cd --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19139.toml @@ -0,0 +1,5 @@ +type="a" +message="Change placement of message widget export action" + +issues=["19139"] +pulls=["19140"] diff --git a/changelog/6.1.0-rc.1/issue-19159.toml b/changelog/6.1.0-rc.1/issue-19159.toml new file mode 100644 index 000000000000..23d02c9da9e8 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19159.toml @@ -0,0 +1,5 @@ +type="f" +message="Fixes handling of error results in the lookup cache." 
+ +issues=["19159"] +pulls=["19670"] diff --git a/changelog/6.1.0-rc.1/issue-19195.toml b/changelog/6.1.0-rc.1/issue-19195.toml new file mode 100644 index 000000000000..aa5069a0509d --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19195.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Add sidecar filebeat collector for windows again." + +issues = ["19195"] +pulls = ["19325"] diff --git a/changelog/6.1.0-rc.1/issue-19207.toml b/changelog/6.1.0-rc.1/issue-19207.toml new file mode 100644 index 000000000000..c0489fce7059 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19207.toml @@ -0,0 +1,5 @@ +type="c" +message="Lazy configuration processing for datanode, collecting all errors in one go." + +issues=["19207"] +pulls=["19801"] diff --git a/changelog/6.1.0-rc.1/issue-19229.toml b/changelog/6.1.0-rc.1/issue-19229.toml new file mode 100644 index 000000000000..37e37d3ce6f6 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19229.toml @@ -0,0 +1,6 @@ + +type="f" +message="Data Node Migration: remove unnecessary steps/big texts/unavailable paths" + +issues=["19229"] +pulls=["19312"] diff --git a/changelog/6.1.0-rc.1/issue-19243.toml b/changelog/6.1.0-rc.1/issue-19243.toml new file mode 100644 index 000000000000..96940d3a24f5 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19243.toml @@ -0,0 +1,5 @@ +type="f" +message="Removed basic auth for datanode rest api. Only JWT supported now." + +issues=["19243"] +pulls=["19244"] diff --git a/changelog/6.1.0-rc.1/issue-19270.toml b/changelog/6.1.0-rc.1/issue-19270.toml new file mode 100644 index 000000000000..7e5347e3ed57 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19270.toml @@ -0,0 +1,5 @@ +type="f" +message="Datanode: Maximum downtime estimate during migration" + +issues=["19270"] +pulls=["19293"] diff --git a/changelog/6.1.0-rc.1/issue-19272.toml b/changelog/6.1.0-rc.1/issue-19272.toml new file mode 100644 index 000000000000..6413e41e82e4 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19272.toml @@ -0,0 +1,5 @@ +type="f" +message="Reuse http client for datanode connection checks during preflight configuration." + +issues=["19272"] +pulls=["19273"] diff --git a/changelog/6.1.0-rc.1/issue-19313.toml b/changelog/6.1.0-rc.1/issue-19313.toml new file mode 100644 index 000000000000..2c1b51db6c4a --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19313.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Fixes Recent Activity pagination, so that it does not ignore entries for deleted items." + +issues = ["19313"] +pulls = ["19394"] + diff --git a/changelog/6.1.0-rc.1/issue-19337.toml b/changelog/6.1.0-rc.1/issue-19337.toml new file mode 100644 index 000000000000..37ff9cd9daf3 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19337.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix issue with scroll position which could occur in news section on the startpage when scrolling by dragging cards." + +issues=["19337"] +pulls=["19341"] diff --git a/changelog/6.1.0-rc.1/issue-19354.toml b/changelog/6.1.0-rc.1/issue-19354.toml new file mode 100644 index 000000000000..e7e5436e2191 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19354.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added remediation steps to general events view." + +issues = ["19354"] +pulls = ["19853"] diff --git a/changelog/6.1.0-rc.1/issue-19402.toml b/changelog/6.1.0-rc.1/issue-19402.toml new file mode 100644 index 000000000000..3b4ee80f197f --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19402.toml @@ -0,0 +1,5 @@ +type="f" +message="Make allowlist for datanode remote reindex migration optional, automatically remove protocols." 
+ +issues=["19402"] +pulls=["19682"] diff --git a/changelog/6.1.0-rc.1/issue-19486.toml b/changelog/6.1.0-rc.1/issue-19486.toml new file mode 100644 index 000000000000..5e4f937e4a03 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19486.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix issue with widget timerange, query and streams filter for events widget on dashboards." + +issues=["19486"] +pulls=["19537"] diff --git a/changelog/6.1.0-rc.1/issue-19562.toml b/changelog/6.1.0-rc.1/issue-19562.toml new file mode 100644 index 000000000000..bee32d5c22be --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19562.toml @@ -0,0 +1,5 @@ +type="f" +message="fix sidecar failure tracking page error" + +issues=["19562"] +pulls=["19570"] diff --git a/changelog/6.1.0-rc.1/issue-19580.toml b/changelog/6.1.0-rc.1/issue-19580.toml new file mode 100644 index 000000000000..ffe4ee316ac3 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19580.toml @@ -0,0 +1,5 @@ +type = "Added" +message = "Add export feature to events and investigations widget" + +issues = ["19580"] +pulls = ["19597", "graylog-plugin-enterprise#7587"] diff --git a/changelog/6.1.0-rc.1/issue-19643.toml b/changelog/6.1.0-rc.1/issue-19643.toml new file mode 100644 index 000000000000..38a98a2893d8 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19643.toml @@ -0,0 +1,5 @@ +type="f" +message="Fixing copying to clipboard in non-secure contexts." + +issues=["19643"] +pulls=["19654"] diff --git a/changelog/6.1.0-rc.1/issue-19693.toml b/changelog/6.1.0-rc.1/issue-19693.toml new file mode 100644 index 000000000000..115c70ed7f3c --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19693.toml @@ -0,0 +1,4 @@ +type="c" +message="Changed default setting for Kafka transports to non-legacy mode." + +issues=["19693"] diff --git a/changelog/6.1.0-rc.1/issue-19694.toml b/changelog/6.1.0-rc.1/issue-19694.toml new file mode 100644 index 000000000000..65bab141bc6a --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19694.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Sorting custom field mappings has been fixed, it does not fail on null values anymore." 
+ +issues = ["19694"] +pulls = ["19960"] + diff --git a/changelog/6.1.0-rc.1/issue-19706.toml b/changelog/6.1.0-rc.1/issue-19706.toml new file mode 100644 index 000000000000..40aab7aa7f98 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19706.toml @@ -0,0 +1,5 @@ +type="f" +message="Disable automatic datanode cert renewal during preflight mode" + +issues=["19706"] +pulls=["19709"] diff --git a/changelog/6.1.0-rc.1/issue-19759.toml b/changelog/6.1.0-rc.1/issue-19759.toml new file mode 100644 index 000000000000..ef60f698d83c --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19759.toml @@ -0,0 +1,5 @@ +type="f" +message="Add support for unknown certificate authorities during the remote reindex migration to the datanode" + +issues=["19759"] +pulls=["19775"] diff --git a/changelog/6.1.0-rc.1/issue-19790.toml b/changelog/6.1.0-rc.1/issue-19790.toml new file mode 100644 index 000000000000..d7d32e11589a --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19790.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix CA upload delete icon XMARK not found" + +issues=["19790"] +pulls=["19791"] diff --git a/changelog/6.1.0-rc.1/issue-19792.toml b/changelog/6.1.0-rc.1/issue-19792.toml new file mode 100644 index 000000000000..f24740d70696 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19792.toml @@ -0,0 +1,5 @@ +type="f" +message="Make migration thread count counfigurable" + +issues=["19792"] +pulls=["19794"] diff --git a/changelog/6.1.0-rc.1/issue-19803.toml b/changelog/6.1.0-rc.1/issue-19803.toml new file mode 100644 index 000000000000..728c77bc0c59 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19803.toml @@ -0,0 +1,5 @@ +type="f" +message="See previous migration steps content by making them expendable but not editable" + +issues=["19803"] +pulls=["19825"] diff --git a/changelog/6.1.0-rc.1/issue-19845.toml b/changelog/6.1.0-rc.1/issue-19845.toml new file mode 100644 index 000000000000..8ced766c30bf --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19845.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix invalid link for OpenSeach documentation" + +issues=["19845"] +pulls=["20025"] diff --git a/changelog/6.1.0-rc.1/issue-19871.toml b/changelog/6.1.0-rc.1/issue-19871.toml new file mode 100644 index 000000000000..560a42b8340f --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19871.toml @@ -0,0 +1,5 @@ +type="f" +message="Disable data node migration wizard on installations using data node" + +issues=["19871"] +pulls=["20035"] diff --git a/changelog/6.1.0-rc.1/issue-19914.toml b/changelog/6.1.0-rc.1/issue-19914.toml new file mode 100644 index 000000000000..35ab8d2e6340 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19914.toml @@ -0,0 +1,5 @@ +type = "removed" +message = "Removed deprecated GreyNoise Community Data Adapter." 
+ +issues = ["19914"] +pulls = ["19927"] diff --git a/changelog/6.1.0-rc.1/issue-19977.toml b/changelog/6.1.0-rc.1/issue-19977.toml new file mode 100644 index 000000000000..772230429056 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19977.toml @@ -0,0 +1,5 @@ +type="f" +message="Display timestamp in datanode migration logs" + +issues=["19977"] +pulls=["19993"] diff --git a/changelog/6.1.0-rc.1/issue-19997.toml b/changelog/6.1.0-rc.1/issue-19997.toml new file mode 100644 index 000000000000..9c51e30ab1f0 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-19997.toml @@ -0,0 +1,5 @@ +type="f" +message="Show previous values in remote reindex migration form" + +issues=["19997"] +pulls=["20008"] diff --git a/changelog/6.1.0-rc.1/issue-20086.toml b/changelog/6.1.0-rc.1/issue-20086.toml new file mode 100644 index 000000000000..1035cc2909ad --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20086.toml @@ -0,0 +1,5 @@ +type="a" +message="Datanode migration telemetry" + +issues=["20086"] +pulls=["20225"] diff --git a/changelog/6.1.0-rc.1/issue-20208.toml b/changelog/6.1.0-rc.1/issue-20208.toml new file mode 100644 index 000000000000..a66a9af365d5 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20208.toml @@ -0,0 +1,5 @@ +type="f" +message="Reopen target index for remote reindex migration if this is closed." + +issues=["20208"] +pulls=["20220"] diff --git a/changelog/6.1.0-rc.1/issue-20223.toml b/changelog/6.1.0-rc.1/issue-20223.toml new file mode 100644 index 000000000000..17826640a2e0 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20223.toml @@ -0,0 +1,5 @@ +type="a" +message="Added new Palo Alto Networks TCP (PAN-OS v11+) input." + +issues=["20223"] +pulls=["20236"] diff --git a/changelog/6.1.0-rc.1/issue-20241.toml b/changelog/6.1.0-rc.1/issue-20241.toml new file mode 100644 index 000000000000..3d6098083c95 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20241.toml @@ -0,0 +1,5 @@ +type="a" +message="Add unit formatting functionality for widgets" + +issues=["20241"] +pulls=["19109"] diff --git a/changelog/6.1.0-rc.1/issue-20409.toml b/changelog/6.1.0-rc.1/issue-20409.toml new file mode 100644 index 000000000000..d9b0cee3ce0f --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20409.toml @@ -0,0 +1,7 @@ +type = "fixed" +message = "Fix search bar query validation error showing double icons." + +issues = ["20409"] +pulls = ["20502"] + +contributors = [""] diff --git a/changelog/6.1.0-rc.1/issue-20434.toml b/changelog/6.1.0-rc.1/issue-20434.toml new file mode 100644 index 000000000000..7f25049ed6da --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20434.toml @@ -0,0 +1,5 @@ +type="f" +message="Use name instead of originName in hover labels templates to fix that advanced field types not resolved in label texts for charts with custom units" + +issues=["20434"] +pulls=["20453"] diff --git a/changelog/6.1.0-rc.1/issue-20448.toml b/changelog/6.1.0-rc.1/issue-20448.toml new file mode 100644 index 000000000000..095577eeb2b8 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20448.toml @@ -0,0 +1,5 @@ +type="f" +message="Use the correct Data Node configuration file server.conf instead of graylog.conf" + +issues=["20448"] +pulls=["20450"] diff --git a/changelog/6.1.0-rc.1/issue-20464.toml b/changelog/6.1.0-rc.1/issue-20464.toml new file mode 100644 index 000000000000..8208f20e8fe4 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20464.toml @@ -0,0 +1,5 @@ +type="f" +message="In cloud env display only node name instead of name with link in message details on the search page." 
+ +issues=["20464"] +pulls=["20505"] diff --git a/changelog/6.1.0-rc.1/issue-20481.toml b/changelog/6.1.0-rc.1/issue-20481.toml new file mode 100644 index 000000000000..b5e9ed5849fb --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20481.toml @@ -0,0 +1,5 @@ +type="f" +message="Fix new Datanode EntityDataTable issues" + +issues=["20481"] +pulls=["20495"] diff --git a/changelog/6.1.0-rc.1/issue-20515.toml b/changelog/6.1.0-rc.1/issue-20515.toml new file mode 100644 index 000000000000..752d0fff0da2 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20515.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix streams page always shows horizontal scroll bar." + +issues = ["20515"] +pulls = ["20522"] diff --git a/changelog/6.1.0-rc.1/issue-20604.toml b/changelog/6.1.0-rc.1/issue-20604.toml new file mode 100644 index 000000000000..4f77b146cfc9 --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-20604.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix Datanode Details Page Error" + +issues = ["20604"] +pulls = ["20628"] diff --git a/changelog/6.1.0-rc.1/issue-5254.toml b/changelog/6.1.0-rc.1/issue-5254.toml new file mode 100644 index 000000000000..994852361a3a --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-5254.toml @@ -0,0 +1,5 @@ +type="a" +message="Added new Beats Kafka input to ingest system logs from Kafka topic using Filebeat." + +issues=["5254"] +pulls=["20067"] diff --git a/changelog/6.1.0-rc.1/issue-7396.toml b/changelog/6.1.0-rc.1/issue-7396.toml new file mode 100644 index 000000000000..c057db06b8bc --- /dev/null +++ b/changelog/6.1.0-rc.1/issue-7396.toml @@ -0,0 +1,5 @@ +type="c" +message="Always display input for widget title in widget edit mode." + +issues=["7396"] +pulls=["19737"] diff --git a/changelog/6.1.0-rc.1/pr-16894.toml b/changelog/6.1.0-rc.1/pr-16894.toml new file mode 100644 index 000000000000..f39017728c0a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-16894.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Display progress indicator for search auto refresh controls." + +pulls = ["16894"] diff --git a/changelog/6.1.0-rc.1/pr-18367.toml b/changelog/6.1.0-rc.1/pr-18367.toml new file mode 100644 index 000000000000..69ebe2fcc6aa --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-18367.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Improving API error responses for JSON parsing failures." + +pulls = ["19367"] diff --git a/changelog/6.1.0-rc.1/pr-18874.toml b/changelog/6.1.0-rc.1/pr-18874.toml new file mode 100644 index 000000000000..9a3c56717062 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-18874.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix scratchpad copy button" + +issues = ["18788"] +pulls = ["18874"] diff --git a/changelog/6.1.0-rc.1/pr-18907.toml b/changelog/6.1.0-rc.1/pr-18907.toml new file mode 100644 index 000000000000..d8bd15025f32 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-18907.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix unknown field caused_by in remote reindex migration task class" + +pulls = ["18907"] diff --git a/changelog/6.1.0-rc.1/pr-18956.toml b/changelog/6.1.0-rc.1/pr-18956.toml new file mode 100644 index 000000000000..e9d59149f6d5 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-18956.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Adds support for user-configurable index set templates." 
+ +issues = [""] +pulls = ["18956"] diff --git a/changelog/6.1.0-rc.1/pr-19030.toml b/changelog/6.1.0-rc.1/pr-19030.toml new file mode 100644 index 000000000000..3b3f1357e3d3 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19030.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix index set deletion issues." + +issues = ["18994"] +pulls = ["19030"] diff --git a/changelog/6.1.0-rc.1/pr-19039.toml b/changelog/6.1.0-rc.1/pr-19039.toml new file mode 100644 index 000000000000..79bf9b0c49c7 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19039.toml @@ -0,0 +1,6 @@ +type = "fixed" +message = "Add beta info to data node preflight and overview page" + +pulls = ["19039"] +issues = ["19038"] + diff --git a/changelog/6.1.0-rc.1/pr-19056.toml b/changelog/6.1.0-rc.1/pr-19056.toml new file mode 100644 index 000000000000..1de0ca962746 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19056.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Actively wait for opensearch process termination in the datanode." + +pulls = ["19056"] diff --git a/changelog/6.1.0-rc.1/pr-19059.toml b/changelog/6.1.0-rc.1/pr-19059.toml new file mode 100644 index 000000000000..0e37e6f19460 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19059.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix rules editor showing warning when saving." + +issues = ["19008"] +pulls = ["19059"] diff --git a/changelog/6.1.0-rc.1/pr-19162.toml b/changelog/6.1.0-rc.1/pr-19162.toml new file mode 100644 index 000000000000..340a64c3cb46 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19162.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Conversion of MessageList into a tabular representation for exports. Added XLSXWriter" + +pulls = ["19162"] diff --git a/changelog/6.1.0-rc.1/pr-19170.toml b/changelog/6.1.0-rc.1/pr-19170.toml new file mode 100644 index 000000000000..363a2e53c95b --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19170.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Remove datanode leader flag. It's neither needed nor used." + +issues = ["18861"] +pulls = ["19170"] diff --git a/changelog/6.1.0-rc.1/pr-19225.toml b/changelog/6.1.0-rc.1/pr-19225.toml new file mode 100644 index 000000000000..1ecea9a932b7 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19225.toml @@ -0,0 +1,4 @@ +type="f" +message="Fixing full page refreshes for menu items with href." + +pulls=["19225"] diff --git a/changelog/6.1.0-rc.1/pr-19228.toml b/changelog/6.1.0-rc.1/pr-19228.toml new file mode 100644 index 000000000000..71421c208408 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19228.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "remove preflight resume startup" + +pulls = ["19228"] +issues=["Graylog2/graylog-plugin-enterprise#7009"] diff --git a/changelog/6.1.0-rc.1/pr-19241.toml b/changelog/6.1.0-rc.1/pr-19241.toml new file mode 100644 index 000000000000..6f82411338d8 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19241.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix data stream replicas and creation on single/multi-node data node setups." 
+ +issues = ["18830", "19010"] +pulls = ["19241"] diff --git a/changelog/6.1.0-rc.1/pr-19260.toml b/changelog/6.1.0-rc.1/pr-19260.toml new file mode 100644 index 000000000000..8f38027681d2 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19260.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix manage extractors link for local input" + +issues = ["19111"] +pulls = ["19260"] diff --git a/changelog/6.1.0-rc.1/pr-19267.toml b/changelog/6.1.0-rc.1/pr-19267.toml new file mode 100644 index 000000000000..911d67e3606c --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19267.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixing full page refreshes for buttons with href." + +pulls = ["19267"] diff --git a/changelog/6.1.0-rc.1/pr-19271.toml b/changelog/6.1.0-rc.1/pr-19271.toml new file mode 100644 index 000000000000..5ece0290f15c --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19271.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Show maximum available time for restarting Graylog during in-place migration." + +issues = ["19270"] +pulls = ["19271"] diff --git a/changelog/6.1.0-rc.1/pr-19381.toml b/changelog/6.1.0-rc.1/pr-19381.toml new file mode 100644 index 000000000000..6b24269e590e --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19381.toml @@ -0,0 +1,5 @@ +type = "Added" +message = "Add possibility to select an index set template when creating an index set." + +issues = ["19075"] +pulls = ["19381", "graylog-plugin-enterprise#7363"] diff --git a/changelog/6.1.0-rc.1/pr-19404.toml b/changelog/6.1.0-rc.1/pr-19404.toml new file mode 100644 index 000000000000..29f384d5d578 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19404.toml @@ -0,0 +1,8 @@ +type = "f" +message = "Removed `Delete` and `Duplicate` options for Sigma event definitions." + +pulls = ["19404"] + +details.user = """ +Sigma event definitions are automatically deleted or duplicated when their parent Sigma rule is deleted or duplicated. +""" diff --git a/changelog/6.1.0-rc.1/pr-19439.toml b/changelog/6.1.0-rc.1/pr-19439.toml new file mode 100644 index 000000000000..fc00d2b41d50 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19439.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed error when the string_array_add function is supplied a null value." + +pulls = ["19439"] +issues = ["graylog-plugin-enterprise#7262"] diff --git a/changelog/6.1.0-rc.1/pr-19463.toml b/changelog/6.1.0-rc.1/pr-19463.toml new file mode 100644 index 000000000000..0cb29c5e7267 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19463.toml @@ -0,0 +1,5 @@ +type="a" +message="Add option to select all fields for messages export widget" + +pulls = ["19463"] +issues=["Graylog2/graylog-plugin-enterprise#5616"] diff --git a/changelog/6.1.0-rc.1/pr-19478.toml b/changelog/6.1.0-rc.1/pr-19478.toml new file mode 100644 index 000000000000..9830d1f1ec84 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19478.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix upload CA tab not clickable" + +issues = ["19485"] +pulls = ["19478"] diff --git a/changelog/6.1.0-rc.1/pr-19487.toml b/changelog/6.1.0-rc.1/pr-19487.toml new file mode 100644 index 000000000000..47cdf527ee16 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19487.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix sidecar undefined error" + +pulls = ["19487"] diff --git a/changelog/6.1.0-rc.1/pr-19519.toml b/changelog/6.1.0-rc.1/pr-19519.toml new file mode 100644 index 000000000000..ffe6ec084997 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19519.toml @@ -0,0 +1,6 @@ +type = "r" +message = "Removed default Content Pack for defunct abuse.ch ransomware tracker." 
+ +issues = ["Graylog2/graylog-plugin-integrations#945"] +pulls = ["19519"] + diff --git a/changelog/6.1.0-rc.1/pr-19536.toml b/changelog/6.1.0-rc.1/pr-19536.toml new file mode 100644 index 000000000000..d25f859a7414 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19536.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Introduced DatanodeKeystore for unified handling of PKI inside data node" + +pulls = ["19536"] diff --git a/changelog/6.1.0-rc.1/pr-19539.toml b/changelog/6.1.0-rc.1/pr-19539.toml new file mode 100644 index 000000000000..84529b2b21e6 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19539.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Datanode UI fixes" + +issues = ["19467", "19477"] +pulls = ["19539"] diff --git a/changelog/6.1.0-rc.1/pr-19563.toml b/changelog/6.1.0-rc.1/pr-19563.toml new file mode 100644 index 000000000000..ad4fbf108322 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19563.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added support for scheduling Event Definitions using cron expressions." + +issues = ["graylog-plugin-enterprise#6598"] +pulls = ["19563", "graylog-plugin-enterprise#7526"] diff --git a/changelog/6.1.0-rc.1/pr-19587.toml b/changelog/6.1.0-rc.1/pr-19587.toml new file mode 100644 index 000000000000..6e8fdd59d770 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19587.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add pipeline connections management to stream details page." + +issues = [""] +pulls = ["19587"] diff --git a/changelog/6.1.0-rc.1/pr-19598.toml b/changelog/6.1.0-rc.1/pr-19598.toml new file mode 100644 index 000000000000..dc2dfa62faef --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19598.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Propagate search errors in legacy search REST API endpoint." + +issues=["14646"] +pulls = ["19598"] diff --git a/changelog/6.1.0-rc.1/pr-19599.toml b/changelog/6.1.0-rc.1/pr-19599.toml new file mode 100644 index 000000000000..8270473fb56d --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19599.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Data Node Migration Linkable Logs View" + +pulls = ["19599"] diff --git a/changelog/6.1.0-rc.1/pr-19600.toml b/changelog/6.1.0-rc.1/pr-19600.toml new file mode 100644 index 000000000000..efb418f6a14a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19600.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix problem with missing field types on message details page." 
+ +pulls = ["19600"] diff --git a/changelog/6.1.0-rc.1/pr-19630.toml b/changelog/6.1.0-rc.1/pr-19630.toml new file mode 100644 index 000000000000..231ab65be551 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19630.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Update URLs in DocHelper class to point to go2docs" + +pulls = ["19630"] diff --git a/changelog/6.1.0-rc.1/pr-19657.toml b/changelog/6.1.0-rc.1/pr-19657.toml new file mode 100644 index 000000000000..a20857ede341 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19657.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Show tasks progress in Remote Reindex Migration" + +pulls = ["19657"] diff --git a/changelog/6.1.0-rc.1/pr-19667.toml b/changelog/6.1.0-rc.1/pr-19667.toml new file mode 100644 index 000000000000..18358379d07a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19667.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Preflight UI fixes" + +issues = ["19285", "19286"] +pulls = ["19667"] diff --git a/changelog/6.1.0-rc.1/pr-19672.toml b/changelog/6.1.0-rc.1/pr-19672.toml new file mode 100644 index 000000000000..3ae8c87edb7a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19672.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add `FilteredMessageOutput` and `OutputFilter` Java interfaces." + +pulls = ["19672"] diff --git a/changelog/6.1.0-rc.1/pr-19673.toml b/changelog/6.1.0-rc.1/pr-19673.toml new file mode 100644 index 000000000000..56d1d16ec220 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19673.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed routing of home nav icon to open the active perspective welcome page." + +issues = ["Graylog2/graylog-plugin-enterprise#7370"] +pulls = ["19673"] diff --git a/changelog/6.1.0-rc.1/pr-19691.toml b/changelog/6.1.0-rc.1/pr-19691.toml new file mode 100644 index 000000000000..66a9365d1f52 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19691.toml @@ -0,0 +1,5 @@ +type="a" +message="Display datanode version in datanodes overview under System / Data Nodes" + +issues=["19680", "18783"] +pulls=["19691"] diff --git a/changelog/6.1.0-rc.1/pr-19739.toml b/changelog/6.1.0-rc.1/pr-19739.toml new file mode 100644 index 000000000000..43b32a6dbd41 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19739.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add filters for filterable stream destinations." + +issues = ["graylog-plugin-enterprise#7210"] +pulls = ["19739"] diff --git a/changelog/6.1.0-rc.1/pr-19747.toml b/changelog/6.1.0-rc.1/pr-19747.toml new file mode 100644 index 000000000000..c202fd14ac60 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19747.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Migrate graylog CA to cluster config storage." + +pulls = ["19747"] diff --git a/changelog/6.1.0-rc.1/pr-19750.toml b/changelog/6.1.0-rc.1/pr-19750.toml new file mode 100644 index 000000000000..1300a5800968 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19750.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix processing failures due to lookup failures in the IPInfo geolocation database." + +issues = ["Graylog2/graylog-plugin-enterprise#7602"] +pulls = ["19750"] diff --git a/changelog/6.1.0-rc.1/pr-19763.toml b/changelog/6.1.0-rc.1/pr-19763.toml new file mode 100644 index 000000000000..0f39dc761401 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19763.toml @@ -0,0 +1,9 @@ +# PLEASE REMOVE COMMENTS AND OPTIONAL FIELDS! THANKS! 
+ +# Entry type according to https://keepachangelog.com/en/1.0.0/ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "added" +message = "Add archive, outputs, pipelines, dataWarehouse column to stream overview." + +issues = [""] +pulls = ["19763"] diff --git a/changelog/6.1.0-rc.1/pr-19824.toml b/changelog/6.1.0-rc.1/pr-19824.toml new file mode 100644 index 000000000000..15775659d51e --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19824.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix indexer cluster health box on indices page to use correct distribution name." + +issues = ["19805"] +pulls = ["19824"] diff --git a/changelog/6.1.0-rc.1/pr-19828.toml b/changelog/6.1.0-rc.1/pr-19828.toml new file mode 100644 index 000000000000..7ebd3fc7072c --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19828.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add guard for matching graylog/datanode version in data node migration." + +pulls = ["19828"] diff --git a/changelog/6.1.0-rc.1/pr-19830.toml b/changelog/6.1.0-rc.1/pr-19830.toml new file mode 100644 index 000000000000..7740b2686d56 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19830.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding a Last Updated attribute to views/dashboard that is updated on save and initially initialised from the associated search for existing views." + +pulls = ["19830"] diff --git a/changelog/6.1.0-rc.1/pr-19846.toml b/changelog/6.1.0-rc.1/pr-19846.toml new file mode 100644 index 000000000000..41a7640d1232 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19846.toml @@ -0,0 +1,6 @@ +# One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +type = "added" +message = "Add Stream destination output filters UI." + +issues = ["7427"] +pulls = ["19846"] diff --git a/changelog/6.1.0-rc.1/pr-19857.toml b/changelog/6.1.0-rc.1/pr-19857.toml new file mode 100644 index 000000000000..5252e50f1a75 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19857.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Disabling interactive functionality in widgets when in non-interactive context." + +pulls = ["19857"] diff --git a/changelog/6.1.0-rc.1/pr-19865.toml b/changelog/6.1.0-rc.1/pr-19865.toml new file mode 100644 index 000000000000..a900bef06ec5 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19865.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add optional ignoreExtraFieldNames parameter to csv_to_map pipeline function" + +pulls = ["19865"] diff --git a/changelog/6.1.0-rc.1/pr-19879.toml b/changelog/6.1.0-rc.1/pr-19879.toml new file mode 100644 index 000000000000..30d9c05fc6c5 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19879.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Run remote reindex connection checks from datanodes, aggregate results." + +pulls = ["19879"] diff --git a/changelog/6.1.0-rc.1/pr-19921.toml b/changelog/6.1.0-rc.1/pr-19921.toml new file mode 100644 index 000000000000..9f045f5bffea --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19921.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Dropping legacy `dashboards` collection in migration." 
+ +issues = ["19912"] +pulls = ["19921"] diff --git a/changelog/6.1.0-rc.1/pr-19926.toml b/changelog/6.1.0-rc.1/pr-19926.toml new file mode 100644 index 000000000000..6d1faddfa5f2 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19926.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix concurrency problem with datanode migration context injection" + +pulls = ["19926"] diff --git a/changelog/6.1.0-rc.1/pr-19932.toml b/changelog/6.1.0-rc.1/pr-19932.toml new file mode 100644 index 000000000000..01211333df67 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19932.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Add /connection-check endpoint to datanode rest api proxy allowlist." + +pulls = ["19932"] diff --git a/changelog/6.1.0-rc.1/pr-19947.toml b/changelog/6.1.0-rc.1/pr-19947.toml new file mode 100644 index 000000000000..f625e7325b6f --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19947.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix parsing of opensearch allowlist during remote reindex migration" + +pulls = ["19947"] diff --git a/changelog/6.1.0-rc.1/pr-19951.toml b/changelog/6.1.0-rc.1/pr-19951.toml new file mode 100644 index 000000000000..36d7bdbd4d2b --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19951.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fixed issue where unescaped quotes in Custom HTTP notification JSON payloads breaks the notifications." + +pulls = ["19951", "20318"] diff --git a/changelog/6.1.0-rc.1/pr-19976.toml b/changelog/6.1.0-rc.1/pr-19976.toml new file mode 100644 index 000000000000..2c0a8f69867a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19976.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Indices not managed by Graylog are now marked and not selected by default in data node's remote reindex migration." + +pulls = ["19976"] +issues = ["19974"] diff --git a/changelog/6.1.0-rc.1/pr-19981.toml b/changelog/6.1.0-rc.1/pr-19981.toml new file mode 100644 index 000000000000..e8cea36173f0 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19981.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Remove last data node from list if no more data nodes are running." + +issues = ["19980"] +pulls = ["19981"] diff --git a/changelog/6.1.0-rc.1/pr-19982.toml b/changelog/6.1.0-rc.1/pr-19982.toml new file mode 100644 index 000000000000..95c9fd522570 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19982.toml @@ -0,0 +1,10 @@ +type = "a" +message = "Support configuration of `output_batch_size` by bytes." + +pulls = ["19982"] + +details.user = """ +The `output_batch_size` setting can now be configured by providing a byte-based value, e.g. `10 mb`. For backward +compatibility, the default value is still count-based (`500`). Previously configured count-based values are +still supported. +""" diff --git a/changelog/6.1.0-rc.1/pr-19983.toml b/changelog/6.1.0-rc.1/pr-19983.toml new file mode 100644 index 000000000000..7eb2c9ede0f1 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19983.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added Microsoft Teams Notification V2 which is compatible with Microsoft Workflows." + +issues = ["Graylog2/graylog-plugin-enterprise#7794"] +pulls = ["19983"] diff --git a/changelog/6.1.0-rc.1/pr-19990.toml b/changelog/6.1.0-rc.1/pr-19990.toml new file mode 100644 index 000000000000..f1884b8c5ae7 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-19990.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix duplicated index entries in remote reindex migration." 
+ +pulls = ["19990"] diff --git a/changelog/6.1.0-rc.1/pr-20001.toml b/changelog/6.1.0-rc.1/pr-20001.toml new file mode 100644 index 000000000000..e71d7a741b5a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20001.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Implement table rows border color with more contrast." + +pulls = ["20001"] diff --git a/changelog/6.1.0-rc.1/pr-20004.toml b/changelog/6.1.0-rc.1/pr-20004.toml new file mode 100644 index 000000000000..6dbc6b60c043 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20004.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Disable world map interactions in non-interactive mode." + +pulls = ["20004"] diff --git a/changelog/6.1.0-rc.1/pr-20042.toml b/changelog/6.1.0-rc.1/pr-20042.toml new file mode 100644 index 000000000000..182a7c2a3d29 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20042.toml @@ -0,0 +1,5 @@ +type="f" +message="only hide DatanodeMigration if state 'FINISHED' and datanode is configured" + +issues=["19871"] +pulls=["20042"] diff --git a/changelog/6.1.0-rc.1/pr-20058.toml b/changelog/6.1.0-rc.1/pr-20058.toml new file mode 100644 index 000000000000..7e18fd9ebfb4 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20058.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Stop existing running datanodes during in-place migration." + +pulls = ["20058"] +issues = ["19833"] diff --git a/changelog/6.1.0-rc.1/pr-20073.toml b/changelog/6.1.0-rc.1/pr-20073.toml new file mode 100644 index 000000000000..4b84a5fa0a3a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20073.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Properly parse trailing padding bytes in IPFIX data sets." + +pulls = ["20073"] +issues = ["graylog-plugin-integrations#395"] diff --git a/changelog/6.1.0-rc.1/pr-20081.toml b/changelog/6.1.0-rc.1/pr-20081.toml new file mode 100644 index 000000000000..5f830d20e855 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20081.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Add support for remote-reindex migration of closed indices" + +pulls = ["20081"] +issues = ["20068"] diff --git a/changelog/6.1.0-rc.1/pr-20089.toml b/changelog/6.1.0-rc.1/pr-20089.toml new file mode 100644 index 000000000000..cb4dc7d74587 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20089.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix list of dashboards in widget's 'copy to dashboard' dialog to only contain allowed dashboards." + +issues=["7331"] +pulls = ["20089"] diff --git a/changelog/6.1.0-rc.1/pr-20090.toml b/changelog/6.1.0-rc.1/pr-20090.toml new file mode 100644 index 000000000000..33fc1dc56933 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20090.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add data node information to support bundle." 
+ +pulls = ["20090"] +issues = ["graylog-plugin-enterprise#7966"] diff --git a/changelog/6.1.0-rc.1/pr-20094.toml b/changelog/6.1.0-rc.1/pr-20094.toml new file mode 100644 index 000000000000..aea62c9aaea3 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20094.toml @@ -0,0 +1,5 @@ +type = "f" +message = "fixed check for missing required int in legacy search api" + +pulls = ["20094"] +issues = ["7395"] diff --git a/changelog/6.1.0-rc.1/pr-20097.toml b/changelog/6.1.0-rc.1/pr-20097.toml new file mode 100644 index 000000000000..02a42b0821ff --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20097.toml @@ -0,0 +1,4 @@ +type = "a" +message = "bumping the OpenSearch (client, server in DataNode) to 2.15" + +pulls = ["20097"] diff --git a/changelog/6.1.0-rc.1/pr-20103.toml b/changelog/6.1.0-rc.1/pr-20103.toml new file mode 100644 index 000000000000..7c9a10c7b9b7 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20103.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add data node logs and migration logs to support bundle." + +pulls = ["20103"] +issues = ["graylog-plugin-enterprise#7964"] diff --git a/changelog/6.1.0-rc.1/pr-20110.toml b/changelog/6.1.0-rc.1/pr-20110.toml new file mode 100644 index 000000000000..a21e5828bf8f --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20110.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added categories to Streams to allow Illuminate content to be scoped to multiple products." + +issues = ["graylog-plugin-enterprise#7945"] +pulls = ["20110", "20160", "20199", "20371", "20373", "20380", "graylog-plugin-enterprise#8295", "graylog-plugin-enterprise#8418", "graylog-plugin-enterprise#8437", "graylog-plugin-enterprise#8452", "graylog-plugin-enterprise#8454" ] diff --git a/changelog/6.1.0-rc.1/pr-20120.toml b/changelog/6.1.0-rc.1/pr-20120.toml new file mode 100644 index 000000000000..9a6d8521cd88 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20120.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add certificates used by data nodes and Graylog to support bundle." + +pulls = ["20120"] +issues = ["graylog-plugin-enterprise#7968", "graylog-plugin-enterprise#7969", "graylog-plugin-enterprise#7970"] diff --git a/changelog/6.1.0-rc.1/pr-20121.toml b/changelog/6.1.0-rc.1/pr-20121.toml new file mode 100644 index 000000000000..15fcda35f89a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20121.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Stop index retention during remote reindex migration" + +pulls = ["20121"] +issues = ["graylog-plugin-enterprise#8097"] diff --git a/changelog/6.1.0-rc.1/pr-20123.toml b/changelog/6.1.0-rc.1/pr-20123.toml new file mode 100644 index 000000000000..f3ce9f8918a8 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20123.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Show system notification while remote reindexing during the data node migration." + +pulls = ["20123"] diff --git a/changelog/6.1.0-rc.1/pr-20222.toml b/changelog/6.1.0-rc.1/pr-20222.toml new file mode 100644 index 000000000000..8b254cc30b4b --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20222.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Pause index set rotation during remote reindex migration." 
+ +pulls = ["20222"] +issues = ["graylog-plugin-enterprise#8281"] diff --git a/changelog/6.1.0-rc.1/pr-20237.toml b/changelog/6.1.0-rc.1/pr-20237.toml new file mode 100644 index 000000000000..a0cc84c4348b --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20237.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Remote Reindex UI improvements" + +pulls = ["20237"] diff --git a/changelog/6.1.0-rc.1/pr-20244.toml b/changelog/6.1.0-rc.1/pr-20244.toml new file mode 100644 index 000000000000..b83e58534bd8 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20244.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Fix error when installing content pack with stream_title parameter." + +pulls = ["20244"] diff --git a/changelog/6.1.0-rc.1/pr-20263.toml b/changelog/6.1.0-rc.1/pr-20263.toml new file mode 100644 index 000000000000..1647f57485fb --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20263.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed issue where grok_exists pipeline rule would return true for substrings of pattern names." + +issues = ["20264"] +pulls = ["20263"] diff --git a/changelog/6.1.0-rc.1/pr-20265.toml b/changelog/6.1.0-rc.1/pr-20265.toml new file mode 100644 index 000000000000..a0681f63fa0d --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20265.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fix issue where content pack stream dependencies were not properly resolved for some dashboards." + +pulls = ["20265", "20288"] diff --git a/changelog/6.1.0-rc.1/pr-20272.toml b/changelog/6.1.0-rc.1/pr-20272.toml new file mode 100644 index 000000000000..066700884bf4 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20272.toml @@ -0,0 +1,5 @@ +type = "fixed" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Fix tooltips not being visible in modals." + +issues = [""] +pulls = ["20272"] diff --git a/changelog/6.1.0-rc.1/pr-20287.toml b/changelog/6.1.0-rc.1/pr-20287.toml new file mode 100644 index 000000000000..2a68ee371852 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20287.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Reset `is_cancelled` flag on job trigger release." + +pulls = ["20287"] diff --git a/changelog/6.1.0-rc.1/pr-20313.toml b/changelog/6.1.0-rc.1/pr-20313.toml new file mode 100644 index 000000000000..de839dbf562a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20313.toml @@ -0,0 +1,5 @@ +type="f" +message="Automatically remove index write blocks during remote reindex migration" + +issues=["graylog-plugin-enterprise#8374"] +pulls=["20313"] diff --git a/changelog/6.1.0-rc.1/pr-20352.toml b/changelog/6.1.0-rc.1/pr-20352.toml new file mode 100644 index 000000000000..e507b03eeb07 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20352.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Display index overview by tier type." + +pulls = ["20352"] +issues=["Graylog2/graylog-plugin-enterprise#8400"] diff --git a/changelog/6.1.0-rc.1/pr-20366.toml b/changelog/6.1.0-rc.1/pr-20366.toml new file mode 100644 index 000000000000..873371955eef --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20366.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Datanode preflight check for node.lock, preventing parallel run of opensearch and datanode" + +pulls = ["20366"] diff --git a/changelog/6.1.0-rc.1/pr-20370.toml b/changelog/6.1.0-rc.1/pr-20370.toml new file mode 100644 index 000000000000..0069f1f29a32 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20370.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Added new Raw HTTP input. Also added support for custom authorization config option for HTTP Gelf and new HTTP Raw inputs." 
+ +pulls = ["20370"] diff --git a/changelog/6.1.0-rc.1/pr-20379.toml b/changelog/6.1.0-rc.1/pr-20379.toml new file mode 100644 index 000000000000..3a46f3fa0139 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20379.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Use external URI for base URL of API browser." + +pulls = ["20379"] diff --git a/changelog/6.1.0-rc.1/pr-20388.toml b/changelog/6.1.0-rc.1/pr-20388.toml new file mode 100644 index 000000000000..caa713a8937d --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20388.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Remove Data Node beta banner" + +pulls = ["20388"] diff --git a/changelog/6.1.0-rc.1/pr-20389.toml b/changelog/6.1.0-rc.1/pr-20389.toml new file mode 100644 index 000000000000..5402d5f8e3e9 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20389.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixed issue where invalid JSON characters being unescaped broke TeamsNotificationV2 notifications." + +pulls = ["20389"] diff --git a/changelog/6.1.0-rc.1/pr-20426.toml b/changelog/6.1.0-rc.1/pr-20426.toml new file mode 100644 index 000000000000..76629b19ffbe --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20426.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixing plot legend for larger label sets." + +pulls = ["20426"] diff --git a/changelog/6.1.0-rc.1/pr-20427.toml b/changelog/6.1.0-rc.1/pr-20427.toml new file mode 100644 index 000000000000..03c24d5f5128 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20427.toml @@ -0,0 +1,5 @@ +type = "c" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "Display Warm Tier Search warning as streams with timestamp." + +issues = ["graylog-plugin-enterprise#8402"] +pulls = ["20427"] diff --git a/changelog/6.1.0-rc.1/pr-20546.toml b/changelog/6.1.0-rc.1/pr-20546.toml new file mode 100644 index 000000000000..6f9766677e53 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20546.toml @@ -0,0 +1,5 @@ +type="f" +message="Datanode informative certificate details" + +issues=["20542", "20543"] +pulls=["20546"] diff --git a/changelog/6.1.0-rc.1/pr-20558.toml b/changelog/6.1.0-rc.1/pr-20558.toml new file mode 100644 index 000000000000..66d13d5e600a --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20558.toml @@ -0,0 +1,5 @@ +type = "c" # One of: a(dded), c(hanged), d(eprecated), r(emoved), f(ixed), s(ecurity) +message = "API endpoint for listing users will not return an error anymore when user is lacking permission, but return current user only instead." + +issues = [] +pulls = ["20558"] diff --git a/changelog/6.1.0-rc.1/pr-20608.toml b/changelog/6.1.0-rc.1/pr-20608.toml new file mode 100644 index 000000000000..9de966f854a7 --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20608.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Added datanode permissions" + +pulls = ["20608"] +issues = ["graylog-plugin-enterprise#8779"] diff --git a/changelog/6.1.0-rc.1/pr-20636.toml b/changelog/6.1.0-rc.1/pr-20636.toml new file mode 100644 index 000000000000..6fdff14e736d --- /dev/null +++ b/changelog/6.1.0-rc.1/pr-20636.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Hide remote reindex migration behind a feature flag" + +pulls = ["20636"] +issues = ["graylog-plugin-enterprise#8811"] diff --git a/changelog/6.1.0-rc.1/pull-18954.toml b/changelog/6.1.0-rc.1/pull-18954.toml new file mode 100644 index 000000000000..18b4fff29774 --- /dev/null +++ b/changelog/6.1.0-rc.1/pull-18954.toml @@ -0,0 +1,5 @@ +type="f" +message="Tieing events widget to configured time range." 
+ +issues=["18914"] +pulls=["18954"] diff --git a/changelog/unreleased/.gitkeep b/changelog/unreleased/.gitkeep new file mode 100644 index 000000000000..900f0cb27b2e --- /dev/null +++ b/changelog/unreleased/.gitkeep @@ -0,0 +1 @@ +# Keep the directory in Git \ No newline at end of file diff --git a/changelog/unreleased/issue-15301.toml b/changelog/unreleased/issue-15301.toml new file mode 100644 index 000000000000..459a41bab1b6 --- /dev/null +++ b/changelog/unreleased/issue-15301.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added multi_grok pipeline rule to support processing a string against many grok patterns at once." + +issues = ["15301"] +pulls = ["20924"] diff --git a/changelog/unreleased/issue-18388.toml b/changelog/unreleased/issue-18388.toml new file mode 100644 index 000000000000..607b6428312b --- /dev/null +++ b/changelog/unreleased/issue-18388.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix custom highlighting of field values in the message details of the message table widget" + +issues = ["18388"] +pulls = ["20921"] diff --git a/changelog/unreleased/issue-18563.toml b/changelog/unreleased/issue-18563.toml new file mode 100644 index 000000000000..7298d46d501d --- /dev/null +++ b/changelog/unreleased/issue-18563.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Improve performance of close and delete actions when working with a lot of index sets." + +issues = ["18563"] +pulls = ["21195"] diff --git a/changelog/unreleased/issue-18836.toml b/changelog/unreleased/issue-18836.toml new file mode 100644 index 000000000000..573de99b88c0 --- /dev/null +++ b/changelog/unreleased/issue-18836.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Add a note clarifying how making changes to index sets does not apply to existing indices" + +issues = ["18836"] +pulls = ["20659"] diff --git a/changelog/unreleased/issue-19058.toml b/changelog/unreleased/issue-19058.toml new file mode 100644 index 000000000000..b673bf48a302 --- /dev/null +++ b/changelog/unreleased/issue-19058.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing highlighting of message in message table by id." + +issues = ["19058"] +pulls = ["21389"] diff --git a/changelog/unreleased/issue-19287.toml b/changelog/unreleased/issue-19287.toml new file mode 100644 index 000000000000..47f8943eaf9d --- /dev/null +++ b/changelog/unreleased/issue-19287.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Log usage of deprecated pipeline functions, specifically `remove_field`." + +issues = ["19287"] +pulls = ["21386"] diff --git a/changelog/unreleased/issue-19975.toml b/changelog/unreleased/issue-19975.toml new file mode 100644 index 000000000000..eaf08be03bd1 --- /dev/null +++ b/changelog/unreleased/issue-19975.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Catch and report exceptions during grok pattern matching." 
+ +issues=["19975"] +pulls = ["21290"] diff --git a/changelog/unreleased/issue-20423.toml b/changelog/unreleased/issue-20423.toml new file mode 100644 index 000000000000..926db481139a --- /dev/null +++ b/changelog/unreleased/issue-20423.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixing keyboard shortcut conflicts for opening query history by changing it to ctrl+space on win and linux os" + +issues = ["20423"] +pulls = ["21336"] diff --git a/changelog/unreleased/issue-20461.toml b/changelog/unreleased/issue-20461.toml new file mode 100644 index 000000000000..68eae1b7b283 --- /dev/null +++ b/changelog/unreleased/issue-20461.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Set datanode remote reindex status to error if any log entry is of error type" + +issues = ["20461"] +pulls = ["20622"] diff --git a/changelog/unreleased/issue-20465.toml b/changelog/unreleased/issue-20465.toml new file mode 100644 index 000000000000..80bc3da35f75 --- /dev/null +++ b/changelog/unreleased/issue-20465.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed the issue when the streams link in the message table was not working on the streams page." + +issues=["20465"] +pulls = ["21338"] diff --git a/changelog/unreleased/issue-20503.toml b/changelog/unreleased/issue-20503.toml new file mode 100644 index 000000000000..e219ad0d126f --- /dev/null +++ b/changelog/unreleased/issue-20503.toml @@ -0,0 +1,5 @@ +type="c" +message="Datanode preflight check that verifies if hostname is bound to an IP" + +issues=["20503"] +pulls=["20492"] diff --git a/changelog/unreleased/issue-20571.toml b/changelog/unreleased/issue-20571.toml new file mode 100644 index 000000000000..cac5a266c5cc --- /dev/null +++ b/changelog/unreleased/issue-20571.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Suppress repeated error logging for Whois lookup adapter when following redirects." + +issues = ["20571"] +pulls = ["21838"] diff --git a/changelog/unreleased/issue-20592.toml b/changelog/unreleased/issue-20592.toml new file mode 100644 index 000000000000..9ba751378873 --- /dev/null +++ b/changelog/unreleased/issue-20592.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix that non visible items are selected in index sets table when changing field type" + +issues = ["20592"] +pulls = ["20616"] diff --git a/changelog/unreleased/issue-20738.toml b/changelog/unreleased/issue-20738.toml new file mode 100644 index 000000000000..06049f45e786 --- /dev/null +++ b/changelog/unreleased/issue-20738.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix encoding of CName in generated CA certificates" + +issues = ["20738"] +pulls = ["20745"] diff --git a/changelog/unreleased/issue-20785.toml b/changelog/unreleased/issue-20785.toml new file mode 100644 index 000000000000..a0abc48909cb --- /dev/null +++ b/changelog/unreleased/issue-20785.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Removed noisy system notification when event limit is exceeded." + +issues = ["20785"] +pulls = ["21080"] diff --git a/changelog/unreleased/issue-20789.toml b/changelog/unreleased/issue-20789.toml new file mode 100644 index 000000000000..ee4fe1231396 --- /dev/null +++ b/changelog/unreleased/issue-20789.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing text color of bar labels in bar chart, which are displayed outside of a bar." 
+ +issues = ["20789"] +pulls = ["20948"] diff --git a/changelog/unreleased/issue-20790.toml b/changelog/unreleased/issue-20790.toml new file mode 100644 index 000000000000..1604d0e4048e --- /dev/null +++ b/changelog/unreleased/issue-20790.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing output class name for prometheus exporter." + +issues = ["20790"] +pulls = ["20844"] diff --git a/changelog/unreleased/issue-20800.toml b/changelog/unreleased/issue-20800.toml new file mode 100644 index 000000000000..508e07ded10d --- /dev/null +++ b/changelog/unreleased/issue-20800.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing up extraction of effective time range for async searches." + +issues = ["20800"] +pulls = ["20808"] diff --git a/changelog/unreleased/issue-20809.toml b/changelog/unreleased/issue-20809.toml new file mode 100644 index 000000000000..a70f402f9514 --- /dev/null +++ b/changelog/unreleased/issue-20809.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added new config options to Email Notifications for send as single email, CC emails/users, and BCC emails/users." + +issues = ["20809"] +pulls = ["20836"] diff --git a/changelog/unreleased/issue-20833.toml b/changelog/unreleased/issue-20833.toml new file mode 100644 index 000000000000..7ae99e4a12cb --- /dev/null +++ b/changelog/unreleased/issue-20833.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Hiding zoom-out notifier when zooming in on graph." + +issues = ["20833"] +pulls = ["21850"] diff --git a/changelog/unreleased/issue-20876.toml b/changelog/unreleased/issue-20876.toml new file mode 100644 index 000000000000..d3618256c77f --- /dev/null +++ b/changelog/unreleased/issue-20876.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixing `unkown format` issue in widgets when using gradiant highlighting." + +issues = ["20876"] +pulls = ["20882"] diff --git a/changelog/unreleased/issue-20881.toml b/changelog/unreleased/issue-20881.toml new file mode 100644 index 000000000000..64c5a84a6bd5 --- /dev/null +++ b/changelog/unreleased/issue-20881.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Implement common entity data table with sorting and filtering for events page" + +issues = ["20881"] +pulls = ["20831", "graylog-plugin-enterprise#9020"] diff --git a/changelog/unreleased/issue-20899.toml b/changelog/unreleased/issue-20899.toml new file mode 100644 index 000000000000..4687cd01f863 --- /dev/null +++ b/changelog/unreleased/issue-20899.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix handling of path prefix in `useSendTelemetry`." + +issues = ["20899"] +pulls = ["20909"] diff --git a/changelog/unreleased/issue-20919.toml b/changelog/unreleased/issue-20919.toml new file mode 100644 index 000000000000..7da5362a34b8 --- /dev/null +++ b/changelog/unreleased/issue-20919.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add datanode.log to support bundle." 
+ +pulls = ["21553"] +issues = ["20919"] diff --git a/changelog/unreleased/issue-20925.toml b/changelog/unreleased/issue-20925.toml new file mode 100644 index 000000000000..d4c07b42475e --- /dev/null +++ b/changelog/unreleased/issue-20925.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix failing of executing search on event definition page" + +issues = ["20925"] +pulls = ["20953"] diff --git a/changelog/unreleased/issue-20926.toml b/changelog/unreleased/issue-20926.toml new file mode 100644 index 000000000000..444b9f3202b5 --- /dev/null +++ b/changelog/unreleased/issue-20926.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Data node: store keystore and truststore passwords in Opensearch keystore" + +issues = ["20926"] +pulls = ["20923"] diff --git a/changelog/unreleased/issue-20928.toml b/changelog/unreleased/issue-20928.toml new file mode 100644 index 000000000000..e11448e95d61 --- /dev/null +++ b/changelog/unreleased/issue-20928.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Client certificate validity configurable in data node" + +issues = ["20928"] +pulls = ["20941"] diff --git a/changelog/unreleased/issue-20932.toml b/changelog/unreleased/issue-20932.toml new file mode 100644 index 000000000000..c1e54d1e56f2 --- /dev/null +++ b/changelog/unreleased/issue-20932.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix error when ticking or unticking show event annotations" + +issues = ["20932"] +pulls = ["21028"] diff --git a/changelog/unreleased/issue-20955.toml b/changelog/unreleased/issue-20955.toml new file mode 100644 index 000000000000..7f3232612b68 --- /dev/null +++ b/changelog/unreleased/issue-20955.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix unescaped double quotes in map and collection typed fields in Custom HTTP Notification JSON body." + +issues = ["20955"] +pulls = ["21167"] diff --git a/changelog/unreleased/issue-20965.toml b/changelog/unreleased/issue-20965.toml new file mode 100644 index 000000000000..945c311ad85e --- /dev/null +++ b/changelog/unreleased/issue-20965.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add unit handling to a single number widget" + +issues = ["20965"] +pulls = ["21339"] diff --git a/changelog/unreleased/issue-21015.toml b/changelog/unreleased/issue-21015.toml new file mode 100644 index 000000000000..56a2e06c0c57 --- /dev/null +++ b/changelog/unreleased/issue-21015.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Handle path prefix when serving web interface assets." + +issues = ["21015"] +pulls = ["21104"] diff --git a/changelog/unreleased/issue-21145.toml b/changelog/unreleased/issue-21145.toml new file mode 100644 index 000000000000..0c1a32761a86 --- /dev/null +++ b/changelog/unreleased/issue-21145.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix when widget title input in widget edit mode looses cursor position on change" + +pulls = ["21327"] +issues = ["21145"] diff --git a/changelog/unreleased/issue-21185.toml b/changelog/unreleased/issue-21185.toml new file mode 100644 index 000000000000..e937b69565f6 --- /dev/null +++ b/changelog/unreleased/issue-21185.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix displaying very small percentages." + +issues = ["21185"] +pulls = ["21368"] diff --git a/changelog/unreleased/issue-21223.toml b/changelog/unreleased/issue-21223.toml new file mode 100644 index 000000000000..3fd2dad2212a --- /dev/null +++ b/changelog/unreleased/issue-21223.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix JNA temp directory to be located in the configured server data dir." 
+ +issues = ["21223"] +pulls = ["21298"] diff --git a/changelog/unreleased/issue-21248.toml b/changelog/unreleased/issue-21248.toml new file mode 100644 index 000000000000..7433226acd9b --- /dev/null +++ b/changelog/unreleased/issue-21248.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Add error handling for event definition bulk action on general perspective." + +pulls = ["21248"] diff --git a/changelog/unreleased/issue-21272.toml b/changelog/unreleased/issue-21272.toml new file mode 100644 index 000000000000..d61038520727 --- /dev/null +++ b/changelog/unreleased/issue-21272.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixed poor performance of CIDR data adapter lookups." + +issues = ["21272"] +pulls = ["21474", "graylog-plugin-enterprise#9628"] diff --git a/changelog/unreleased/issue-21282.toml b/changelog/unreleased/issue-21282.toml new file mode 100644 index 000000000000..22a8d539a5df --- /dev/null +++ b/changelog/unreleased/issue-21282.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Retrying indexing messages failing due to circuit breaker exception." + +issues = ["21282"] +pulls = ["21313"] diff --git a/changelog/unreleased/issue-21296.toml b/changelog/unreleased/issue-21296.toml new file mode 100644 index 000000000000..8bc2e620dd24 --- /dev/null +++ b/changelog/unreleased/issue-21296.toml @@ -0,0 +1,12 @@ +type = "a" +message = "Add option to render a pluggable navigation item after another specific item." + +issues = ["21296"] +pulls = ["21827"] + + +details.user = """ +It is now possible to render a pluggable navigation item after another specific navigation item. +For example when you want to display a navigation item after the "Streams" link you can extend it with `position: { after: 'Search' }` to it, +before registering it in the plugin store. +""" diff --git a/changelog/unreleased/issue-21321.toml b/changelog/unreleased/issue-21321.toml new file mode 100644 index 000000000000..43698954c920 --- /dev/null +++ b/changelog/unreleased/issue-21321.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added overview-page for access-tokens, accessible by admins." + +issues = ["21321", "21396"] +pulls = ["21315"] diff --git a/changelog/unreleased/issue-21322.toml b/changelog/unreleased/issue-21322.toml new file mode 100644 index 000000000000..b9773596fca4 --- /dev/null +++ b/changelog/unreleased/issue-21322.toml @@ -0,0 +1,5 @@ +type="a" +message="Added new Configuration to define default for allowing access tokens for external users." + +issues=["21322"] +pulls=["21438"] diff --git a/changelog/unreleased/issue-21351.toml b/changelog/unreleased/issue-21351.toml new file mode 100644 index 000000000000..605b5d308498 --- /dev/null +++ b/changelog/unreleased/issue-21351.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Add default auto refresh config on events and alerts page" + +issues = ["21350"] +pulls = ["21351"] diff --git a/changelog/unreleased/issue-21397.toml b/changelog/unreleased/issue-21397.toml new file mode 100644 index 000000000000..3779ebb3d2aa --- /dev/null +++ b/changelog/unreleased/issue-21397.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Adds token management page in the 'Users and Teams' section." + +issues = ["21397"] +pulls = ["21737"] diff --git a/changelog/unreleased/issue-21398.toml b/changelog/unreleased/issue-21398.toml new file mode 100644 index 000000000000..6835d1a97e60 --- /dev/null +++ b/changelog/unreleased/issue-21398.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix an issue causing saving searches/dashboards after clearing the unit type to fail with error." 
+ +pulls = ["21399"] +issues = ["21398"] diff --git a/changelog/unreleased/issue-21408.toml b/changelog/unreleased/issue-21408.toml new file mode 100644 index 000000000000..3ba83d3cad43 --- /dev/null +++ b/changelog/unreleased/issue-21408.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Prevent messages from being ingested with timestamps that are in the future. Invalid timestamps will be forced to the receive time or current time." + +pulls = ["21429"] +issues = ["21408"] diff --git a/changelog/unreleased/issue-21509.toml b/changelog/unreleased/issue-21509.toml new file mode 100644 index 000000000000..1215033a05ac --- /dev/null +++ b/changelog/unreleased/issue-21509.toml @@ -0,0 +1,5 @@ +type="a" +message="Added new Configuration to define default for restricting access tokens to admins only." + +issues=["21509"] +pulls=["21619"] diff --git a/changelog/unreleased/issue-21565.toml b/changelog/unreleased/issue-21565.toml new file mode 100644 index 000000000000..75d2958e908c --- /dev/null +++ b/changelog/unreleased/issue-21565.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed issue preventing quoted entity search values from finding exact matches on various pages throughout Graylog." + +issues = ["21565"] +pulls = ["21567"] diff --git a/changelog/unreleased/issue-21653.toml b/changelog/unreleased/issue-21653.toml new file mode 100644 index 000000000000..30907aeb4c78 --- /dev/null +++ b/changelog/unreleased/issue-21653.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Show helpful error/explanation on message permalink page when index/message is missing." + +pulls = ["21663"] +issues = ["21653"] diff --git a/changelog/unreleased/issue-21660.toml b/changelog/unreleased/issue-21660.toml new file mode 100644 index 000000000000..a91f452ccfdd --- /dev/null +++ b/changelog/unreleased/issue-21660.toml @@ -0,0 +1,5 @@ +type="a" +message="Add new configurable default TTL for creating API tokens." + +issues=["21660", "21661", "21662"] +pulls=["21687"] diff --git a/changelog/unreleased/issue-21833.toml b/changelog/unreleased/issue-21833.toml new file mode 100644 index 000000000000..54e3ff9e5968 --- /dev/null +++ b/changelog/unreleased/issue-21833.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Make `latest` metric preserve field type of field used in frontend." + +pulls = ["21833"] +issues = ["21837"] diff --git a/changelog/unreleased/issue-9368.toml b/changelog/unreleased/issue-9368.toml new file mode 100644 index 000000000000..65831e14b8a2 --- /dev/null +++ b/changelog/unreleased/issue-9368.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fixed query validation with parameter usage. Parameters representing field names do not cause 'unknown field' warnings anymore." + +issues = ["9368"] +pulls = ["21469"] diff --git a/changelog/unreleased/issue-9778.toml b/changelog/unreleased/issue-9778.toml new file mode 100644 index 000000000000..8b747c400dcb --- /dev/null +++ b/changelog/unreleased/issue-9778.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Added value sanitation to MarkdownEditor component" + +issues = ["Graylog2/graylog-plugin-enterprise#9778"] +pulls = ["21786", "Graylog2/graylog-plugin-enterprise#9830"] diff --git a/changelog/unreleased/issue-enterprise-6479.toml b/changelog/unreleased/issue-enterprise-6479.toml new file mode 100644 index 000000000000..ba171559b0e9 --- /dev/null +++ b/changelog/unreleased/issue-enterprise-6479.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Sorting pivot values numerically when field is numeric." 
+ +issues = ["Graylog2/graylog-plugin-enterprise#6479"] +pulls = ["20918"] diff --git a/changelog/unreleased/pr-20270.toml b/changelog/unreleased/pr-20270.toml new file mode 100644 index 000000000000..4b71aa4accd6 --- /dev/null +++ b/changelog/unreleased/pr-20270.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix various issues in AMQP input: allow passive queue declaration, fix leaks in thread execution and connection handling, expose client metrics, set proper connection name, clarify parallelQueues option" + +issues = ["6827", "7747"] +pulls = ["20270"] diff --git a/changelog/unreleased/pr-20629.toml b/changelog/unreleased/pr-20629.toml new file mode 100644 index 000000000000..38e89fc1dd4b --- /dev/null +++ b/changelog/unreleased/pr-20629.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix statvfs warnings from oshi on server startup." + +pulls = ["20629"] diff --git a/changelog/unreleased/pr-20658.toml b/changelog/unreleased/pr-20658.toml new file mode 100644 index 000000000000..83b0b6422718 --- /dev/null +++ b/changelog/unreleased/pr-20658.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add PBKDF2 password hash function for FIPS 140-2 support" + +pulls = ["20658"] diff --git a/changelog/unreleased/pr-20678.toml b/changelog/unreleased/pr-20678.toml new file mode 100644 index 000000000000..cdc8c8850d80 --- /dev/null +++ b/changelog/unreleased/pr-20678.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Add missing any/all option to stream rules in routing/intake page" + +issues = ["graylog-plugin-enterprise#8497"] +pulls = ["20678"] diff --git a/changelog/unreleased/pr-20714.toml b/changelog/unreleased/pr-20714.toml new file mode 100644 index 000000000000..f855a42969d8 --- /dev/null +++ b/changelog/unreleased/pr-20714.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Hide stream rule Matching-Type-Switcher when stream is not editable" + +pulls = ["20714"] diff --git a/changelog/unreleased/pr-20748.toml b/changelog/unreleased/pr-20748.toml new file mode 100644 index 000000000000..fb23ce97905a --- /dev/null +++ b/changelog/unreleased/pr-20748.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Optimize widgets warnings" + +issues = ["graylog-plugin-enterprise#8213"] +pulls = ["20748"] diff --git a/changelog/unreleased/pr-20788.toml b/changelog/unreleased/pr-20788.toml new file mode 100644 index 000000000000..4f2273c68552 --- /dev/null +++ b/changelog/unreleased/pr-20788.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Automatically trim newline characters for Fortigate messages received through Syslog inputs." + +issues = ["graylog-plugin-enterprise#8980"] +pulls = ["20788"] diff --git a/changelog/unreleased/pr-20842.toml b/changelog/unreleased/pr-20842.toml new file mode 100644 index 000000000000..dfc17e00883a --- /dev/null +++ b/changelog/unreleased/pr-20842.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Replace datanode insecure_startup configuration with selfsigned_startup, providing full selfsigned SSL setup" + +issues = ["18911"] +pulls = ["20842"] diff --git a/changelog/unreleased/pr-20847.toml b/changelog/unreleased/pr-20847.toml new file mode 100644 index 000000000000..5c49170b6e05 --- /dev/null +++ b/changelog/unreleased/pr-20847.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fixes issue where some event types did not display all actions that should have been available." 
+ +issues = ["graylog-plugin-enterprise#8638"] +pulls = ["20847"] diff --git a/changelog/unreleased/pr-20858.toml b/changelog/unreleased/pr-20858.toml new file mode 100644 index 000000000000..ee89d5307d95 --- /dev/null +++ b/changelog/unreleased/pr-20858.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Enterprise Traffic Graph: Ensure license data is visible on Cloud" + +issues = ["graylog-plugin-enterprise#6055"] +pulls = ["20858"] diff --git a/changelog/unreleased/pr-20910.toml b/changelog/unreleased/pr-20910.toml new file mode 100644 index 000000000000..ca47530886bc --- /dev/null +++ b/changelog/unreleased/pr-20910.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fixing replay search routes when path prefix is present." + +pulls = ["20910"] diff --git a/changelog/unreleased/pr-20912.toml b/changelog/unreleased/pr-20912.toml new file mode 100644 index 000000000000..83bdc2b6892c --- /dev/null +++ b/changelog/unreleased/pr-20912.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Directory compatibility check for datanode inplace migration rejects empty dir" + +issues = [] +pulls = ["20912"] diff --git a/changelog/unreleased/pr-20968.toml b/changelog/unreleased/pr-20968.toml new file mode 100644 index 000000000000..b7a5ea266d36 --- /dev/null +++ b/changelog/unreleased/pr-20968.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Suppress noisy aggregation search error when it is likely due to cloud maintenance." + +issues = ["Graylog2/graylog-plugin-enterprise#8919"] +pulls = ["20968"] diff --git a/changelog/unreleased/pr-20969.toml b/changelog/unreleased/pr-20969.toml new file mode 100644 index 000000000000..e28fbbc6332e --- /dev/null +++ b/changelog/unreleased/pr-20969.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fixing route for AWS integration when path prefix is present." + +pulls = ["20969"] diff --git a/changelog/unreleased/pr-20996.toml b/changelog/unreleased/pr-20996.toml new file mode 100644 index 000000000000..7604912ac899 --- /dev/null +++ b/changelog/unreleased/pr-20996.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Add auto refresh button to events page" + +issues = ["graylog-plugin-enterprise#8661"] +pulls = ["20996"] diff --git a/changelog/unreleased/pr-21031.toml b/changelog/unreleased/pr-21031.toml new file mode 100644 index 000000000000..5d8624c3aec1 --- /dev/null +++ b/changelog/unreleased/pr-21031.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Unified snapshot configuration, role and cache management in datanode" + +pulls = ["21031"] diff --git a/changelog/unreleased/pr-21043.toml b/changelog/unreleased/pr-21043.toml new file mode 100644 index 000000000000..76fe389d5558 --- /dev/null +++ b/changelog/unreleased/pr-21043.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix line wrapping in new sidecar default configurations for filebeat." + +pulls = ["21043"] diff --git a/changelog/unreleased/pr-21044.toml b/changelog/unreleased/pr-21044.toml new file mode 100644 index 000000000000..247b178954ba --- /dev/null +++ b/changelog/unreleased/pr-21044.toml @@ -0,0 +1,4 @@ +type = "added" +message = "Allow sorting events/alerts based on aggregation time range." + +pulls = ["21044"] diff --git a/changelog/unreleased/pr-21056.toml b/changelog/unreleased/pr-21056.toml new file mode 100644 index 000000000000..955df7ba413a --- /dev/null +++ b/changelog/unreleased/pr-21056.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Allowing to filter events based on event definition, priority, key & aggregation time range." 
+ +issues = ["21055"] +pulls = ["21056"] diff --git a/changelog/unreleased/pr-21058.toml b/changelog/unreleased/pr-21058.toml new file mode 100644 index 000000000000..a386c7237892 --- /dev/null +++ b/changelog/unreleased/pr-21058.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Centralized view of Graylog cluster components" + +issues = ["20997"] +pulls = ["21058"] diff --git a/changelog/unreleased/pr-21062.toml b/changelog/unreleased/pr-21062.toml new file mode 100644 index 000000000000..0df01e48c0a3 --- /dev/null +++ b/changelog/unreleased/pr-21062.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Better handling of intermediate CAs in datanode truststore" + +pulls = ["21062"] diff --git a/changelog/unreleased/pr-21089.toml b/changelog/unreleased/pr-21089.toml new file mode 100644 index 000000000000..06be3d486d3a --- /dev/null +++ b/changelog/unreleased/pr-21089.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding endpoint for bulk retrieval of event definitions." + +pulls = ["21089"] diff --git a/changelog/unreleased/pr-21123.toml b/changelog/unreleased/pr-21123.toml new file mode 100644 index 000000000000..3bb5a7f62244 --- /dev/null +++ b/changelog/unreleased/pr-21123.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Adjust Event Priority field choices for consistency with Graylog Security." + +pulls = ["21123"] diff --git a/changelog/unreleased/pr-21136.toml b/changelog/unreleased/pr-21136.toml new file mode 100644 index 000000000000..f7d20bdccb16 --- /dev/null +++ b/changelog/unreleased/pr-21136.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding advanced field type for event definitions, looking up titles in aggregations and message table." + +pulls = ["21136"] diff --git a/changelog/unreleased/pr-21174.toml b/changelog/unreleased/pr-21174.toml new file mode 100644 index 000000000000..20e5f3b8d27a --- /dev/null +++ b/changelog/unreleased/pr-21174.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Prevent accidental deletion of the default system index set. " + +pulls = ["21174"] +issues = ["Graylog2/graylog-plugin-enterprise#5904"] diff --git a/changelog/unreleased/pr-21197.toml b/changelog/unreleased/pr-21197.toml new file mode 100644 index 000000000000..89604ad1f796 --- /dev/null +++ b/changelog/unreleased/pr-21197.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Add archive restore retry on mapper parsing exception." + +issues = ["graylog-plugin-enterprise#9208"] +pulls = ["21197", "Graylog2/graylog-plugin-enterprise#9413"] diff --git a/changelog/unreleased/pr-21205.toml b/changelog/unreleased/pr-21205.toml new file mode 100644 index 000000000000..44cd47bce32c --- /dev/null +++ b/changelog/unreleased/pr-21205.toml @@ -0,0 +1,8 @@ +type = "a" +message = "Enable defining required permissions for navigation web interface plugin." + +pulls = ["21205"] +details.user = """ +Before it was only possible to define required permissions for a navigation dropdown item. +""" + diff --git a/changelog/unreleased/pr-21206.toml b/changelog/unreleased/pr-21206.toml new file mode 100644 index 000000000000..dac3a9f2ffc7 --- /dev/null +++ b/changelog/unreleased/pr-21206.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Filters system event definitions from list of entities that can be exported in a content pack." 
+ +pulls = ["21206"] +issues = ["21166"] diff --git a/changelog/unreleased/pr-21208.toml b/changelog/unreleased/pr-21208.toml new file mode 100644 index 000000000000..eb3b0b8ff1b8 --- /dev/null +++ b/changelog/unreleased/pr-21208.toml @@ -0,0 +1,4 @@ +type = "f" +message = "batching request for index block status if the combined length of the indices exceed the max possible URL length " + +pulls = ["21208"] diff --git a/changelog/unreleased/pr-21217.toml b/changelog/unreleased/pr-21217.toml new file mode 100644 index 000000000000..2bab952a1e45 --- /dev/null +++ b/changelog/unreleased/pr-21217.toml @@ -0,0 +1,4 @@ +type = "f" +message = "Fixed bug where inputs with multiple encrypted configuration values could not be saved." + +pulls = ["21217"] diff --git a/changelog/unreleased/pr-21238.toml b/changelog/unreleased/pr-21238.toml new file mode 100644 index 000000000000..6924c8105d84 --- /dev/null +++ b/changelog/unreleased/pr-21238.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Enabling bulk actions for event definitions." + +pulls = ["21238"] diff --git a/changelog/unreleased/pr-21262.toml b/changelog/unreleased/pr-21262.toml new file mode 100644 index 000000000000..fa322fc5c064 --- /dev/null +++ b/changelog/unreleased/pr-21262.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding 'Replay Search' Bulk action to alerts & events." + +pulls = ["21262"] diff --git a/changelog/unreleased/pr-21286.toml b/changelog/unreleased/pr-21286.toml new file mode 100644 index 000000000000..f5c0ca7f77fa --- /dev/null +++ b/changelog/unreleased/pr-21286.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix `org.graylog2.outputs.ElasticSearchOutput.writes` metric." + +pulls = ["21286"] diff --git a/changelog/unreleased/pr-21349.toml b/changelog/unreleased/pr-21349.toml new file mode 100644 index 000000000000..9434dec74f64 --- /dev/null +++ b/changelog/unreleased/pr-21349.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Fix missing Query Parameters in Event Definitions in Content Packs." + +issues = ["Graylog2/graylog-plugin-enterprise#9274"] +pulls = ["21349"] diff --git a/changelog/unreleased/pr-21359.toml b/changelog/unreleased/pr-21359.toml new file mode 100644 index 000000000000..f9728c836250 --- /dev/null +++ b/changelog/unreleased/pr-21359.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Fix error when clicking the Replay search link in event definition summary in edit modal on security events page" + +issues = ["graylog-plugin-enterprise#9289"] +pulls = ["21359"] diff --git a/changelog/unreleased/pr-21367.toml b/changelog/unreleased/pr-21367.toml new file mode 100644 index 000000000000..cf04c915e64e --- /dev/null +++ b/changelog/unreleased/pr-21367.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Adding missing API descriptions on method parameters for Simple Scripting API." + +pulls = ["21367"] +issues = ["20821"] diff --git a/changelog/unreleased/pr-21376.toml b/changelog/unreleased/pr-21376.toml new file mode 100644 index 000000000000..55cb519c38f2 --- /dev/null +++ b/changelog/unreleased/pr-21376.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Improve readability of filter preview error message in event definition wizard (dark mode)." 
+ +issues = ["Graylog2/graylog-plugin-enterprise#9336"] +pulls = ["21376"] diff --git a/changelog/unreleased/pr-21454.toml b/changelog/unreleased/pr-21454.toml new file mode 100644 index 000000000000..5dc9dec2667d --- /dev/null +++ b/changelog/unreleased/pr-21454.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Added metrics to the shared \"scheduler\" and \"daemonScheduler\" thread pools." + +pulls = ["21454"] diff --git a/changelog/unreleased/pr-21456.toml b/changelog/unreleased/pr-21456.toml new file mode 100644 index 000000000000..011ac2157b97 --- /dev/null +++ b/changelog/unreleased/pr-21456.toml @@ -0,0 +1,5 @@ +type = "added" +message = "Added escape character handling to the key_value pipeline function." + +pulls = ["21456", "21556"] +issues = ["graylog-plugin-enterprise#9552"] diff --git a/changelog/unreleased/pr-21467.toml b/changelog/unreleased/pr-21467.toml new file mode 100644 index 000000000000..d0a80594a6e5 --- /dev/null +++ b/changelog/unreleased/pr-21467.toml @@ -0,0 +1,5 @@ +type = "f" +message = "UI Framework for deprecating pipeline functions" + +pulls = ["21467"] +issues = ["19287"] diff --git a/changelog/unreleased/pr-21493.toml b/changelog/unreleased/pr-21493.toml new file mode 100644 index 000000000000..54ecb04b1e7b --- /dev/null +++ b/changelog/unreleased/pr-21493.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Upgraded MongoJack dependency to `5.0.2`. This might require changes to third-party plugins. Please check Graylog upgrade notes." + +pulls = ["21493"] diff --git a/changelog/unreleased/pr-21536.toml b/changelog/unreleased/pr-21536.toml new file mode 100644 index 000000000000..ad828aa91002 --- /dev/null +++ b/changelog/unreleased/pr-21536.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Add input for OpenTelemetry logs." + +pulls = ["21536"] diff --git a/changelog/unreleased/pr-21538.toml b/changelog/unreleased/pr-21538.toml new file mode 100644 index 000000000000..a40ebd0787ff --- /dev/null +++ b/changelog/unreleased/pr-21538.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Upgrade AWS Kinesis client libary to version 6.1.0 to fix potential shard processing issues." + +issues = ["21451"] +pulls = ["21538"] diff --git a/changelog/unreleased/pr-21579.toml b/changelog/unreleased/pr-21579.toml new file mode 100644 index 000000000000..950b575cc407 --- /dev/null +++ b/changelog/unreleased/pr-21579.toml @@ -0,0 +1,4 @@ +type = "d" +message = "Deprecated Palo Alto Networks version 8.x and 9.x inputs" + +pulls = ["21579"] diff --git a/changelog/unreleased/pr-21622.toml b/changelog/unreleased/pr-21622.toml new file mode 100644 index 000000000000..f742078b8fb7 --- /dev/null +++ b/changelog/unreleased/pr-21622.toml @@ -0,0 +1,5 @@ +type = "fixed" +message = "Don't add search role if node_roles are user-defined in datanode.conf" + +issues = ["graylog-plugin-enterprise#9704"] +pulls = ["21622"] diff --git a/changelog/unreleased/pr-21632.toml b/changelog/unreleased/pr-21632.toml new file mode 100644 index 000000000000..7c426a791f3d --- /dev/null +++ b/changelog/unreleased/pr-21632.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Add more logging around CA upload in preflight, support more private key types." + +issues = [] +pulls = ["21632"] diff --git a/changelog/unreleased/pr-21755.toml b/changelog/unreleased/pr-21755.toml new file mode 100644 index 000000000000..8262a346ffd2 --- /dev/null +++ b/changelog/unreleased/pr-21755.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Refer to official AWS Kinesis/Cloudwatch input for permissions on first page of input setup." 
+ +pulls = ["21755"] diff --git a/changelog/unreleased/pr-21764.toml b/changelog/unreleased/pr-21764.toml new file mode 100644 index 000000000000..8d5013e5e185 --- /dev/null +++ b/changelog/unreleased/pr-21764.toml @@ -0,0 +1,5 @@ +type = "c" +message = "AmqpTransport: Log root cause of IOException, if possible" + +issues = [""] +pulls = ["21764"] diff --git a/changelog/unreleased/pr-21773.toml b/changelog/unreleased/pr-21773.toml new file mode 100644 index 000000000000..6efa1c85d8b5 --- /dev/null +++ b/changelog/unreleased/pr-21773.toml @@ -0,0 +1,5 @@ +type="a" +message="Allow specifying a TTL when creating a new API-token. Also, automatically delete expired tokens." + +issues=["21440"] +pulls=["21773"] diff --git a/changelog/unreleased/pr-21783.toml b/changelog/unreleased/pr-21783.toml new file mode 100644 index 000000000000..641c9a806780 --- /dev/null +++ b/changelog/unreleased/pr-21783.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Better handling of keystore aliases in datanode." + +issues = [] +pulls = ["21783"] diff --git a/changelog/unreleased/pr-21795.toml b/changelog/unreleased/pr-21795.toml new file mode 100644 index 000000000000..57ee572e16d4 --- /dev/null +++ b/changelog/unreleased/pr-21795.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix escaping of response headers in API browser." + +pulls = ["21795"] diff --git a/changelog/unreleased/pr-21811.toml b/changelog/unreleased/pr-21811.toml new file mode 100644 index 000000000000..43ee41436dd7 --- /dev/null +++ b/changelog/unreleased/pr-21811.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Showing chart for alerts/events over time on alerts page." + +pulls = ["21811"] diff --git a/changelog/unreleased/pr-21828.toml b/changelog/unreleased/pr-21828.toml new file mode 100644 index 000000000000..71a9fec46fae --- /dev/null +++ b/changelog/unreleased/pr-21828.toml @@ -0,0 +1,4 @@ +type = "c" +message = "Better self-signed certificate check in datanode keystore" + +pulls = ["21811"] diff --git a/changelog/unreleased/pr-21835.toml b/changelog/unreleased/pr-21835.toml new file mode 100644 index 000000000000..468960fd2d80 --- /dev/null +++ b/changelog/unreleased/pr-21835.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Adding an interface to tag inputs for enterprise functionality, extracting scripting API methods into a service for reuse" + +pulls = ["21835", "graylog-plugin-enterprise#9869"] diff --git a/changelog/unreleased/pr-21858.toml b/changelog/unreleased/pr-21858.toml new file mode 100644 index 000000000000..6f05bc64ed27 --- /dev/null +++ b/changelog/unreleased/pr-21858.toml @@ -0,0 +1,5 @@ +type = "f" +message = "Improved validation and handling of uploaded certficate authorities." + +issues = ["21832"] +pulls = ["21858"] diff --git a/changelog/unreleased/pr-21865.toml b/changelog/unreleased/pr-21865.toml new file mode 100644 index 000000000000..9d1a03e2619e --- /dev/null +++ b/changelog/unreleased/pr-21865.toml @@ -0,0 +1,4 @@ +type = "a" +message = "Make type specific field value renderer pluggable." + +pulls = ["21795"] diff --git a/changelog/unreleased/pr-21887.toml b/changelog/unreleased/pr-21887.toml new file mode 100644 index 000000000000..7a470c830bf7 --- /dev/null +++ b/changelog/unreleased/pr-21887.toml @@ -0,0 +1,5 @@ +type = "a" +message = "Added system notification with link to upgrade page if Graylog server and one of the connected Data Nodes' versions do not match." 
+ +issues = ["21701"] +pulls = ["21887"] diff --git a/changelog/unreleased/pr-21927.toml b/changelog/unreleased/pr-21927.toml new file mode 100644 index 000000000000..1e87e0528a0f --- /dev/null +++ b/changelog/unreleased/pr-21927.toml @@ -0,0 +1,4 @@ +type = "fixed" +message = "Fix display of encrypted configuration values for inputs." + +pulls = ["21927"] diff --git a/changelog/unreleased/pr-21932.toml b/changelog/unreleased/pr-21932.toml new file mode 100644 index 000000000000..4e900fa00a59 --- /dev/null +++ b/changelog/unreleased/pr-21932.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Admin user can no longer cancel search jobs of other users." + +issues = ["Graylog2/graylog-plugin-enterprise#9926"] +pulls = ["21932"] diff --git a/changelog/unreleased/pr-21943.toml b/changelog/unreleased/pr-21943.toml new file mode 100644 index 000000000000..779ff5305dd1 --- /dev/null +++ b/changelog/unreleased/pr-21943.toml @@ -0,0 +1,5 @@ +type = "c" +message = "Generated certificates now use 4096 bit keys." + +issues = ["graylog-plugin-enterprise#9951"] +pulls = ["21943"] diff --git a/changelog/unreleased/pr-21959.toml b/changelog/unreleased/pr-21959.toml new file mode 100644 index 000000000000..4510577a85a4 --- /dev/null +++ b/changelog/unreleased/pr-21959.toml @@ -0,0 +1,5 @@ +type = "c" +message = "More concise information on when/how to change permissions for the OpenSearch data directory in the Data Node migration wizard. ." + +issues = ["Graylog2/graylog-plugin-enterprise#9231"] +pulls = ["21959"] diff --git a/config/forbidden-apis/netty3.txt b/config/forbidden-apis/netty3.txt deleted file mode 100644 index e5aad186fe06..000000000000 --- a/config/forbidden-apis/netty3.txt +++ /dev/null @@ -1 +0,0 @@ -org.jboss.netty.** @ Migrate to Netty 4.x diff --git a/config/forbidden-apis/signatures.txt b/config/forbidden-apis/signatures.txt deleted file mode 100644 index 370754a18504..000000000000 --- a/config/forbidden-apis/signatures.txt +++ /dev/null @@ -1,89 +0,0 @@ -@defaultMessage Use a custom thread factory to ensure proper thread naming. -java.util.concurrent.Executors#defaultThreadFactory() -java.util.concurrent.Executors#newCachedThreadPool() -java.util.concurrent.Executors#newFixedThreadPool(int) -java.util.concurrent.Executors#newScheduledThreadPool(int) -java.util.concurrent.Executors#newSingleThreadExecutor() -java.util.concurrent.Executors#newSingleThreadScheduledExecutor() -java.util.concurrent.Executors#privilegedThreadFactory() - -@defaultMessage Constructing a DateTime without a time zone is dangerous -org.joda.time.DateTime#() -org.joda.time.DateTime#(long) -org.joda.time.DateTime#(int, int, int, int, int) -org.joda.time.DateTime#(int, int, int, int, int, int) -org.joda.time.DateTime#(int, int, int, int, int, int, int) -org.joda.time.DateTime#now() -org.joda.time.DateTimeZone#getDefault() - -@defaultMessage Please do not try to stop the world -java.lang.System#gc() - -java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars -java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars - -@defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead. 
-java.lang.Object#wait() -java.lang.Object#wait(long) -java.lang.Object#wait(long,int) -java.lang.Object#notify() -java.lang.Object#notifyAll() - -@defaultMessage Beware of the behavior of this method on MIN_VALUE -java.lang.Math#abs(int) -java.lang.Math#abs(long) - -@defaultMessage Use Channels.* methods to write to channels. Do not write directly. -java.nio.channels.WritableByteChannel#write(java.nio.ByteBuffer) -java.nio.channels.FileChannel#write(java.nio.ByteBuffer, long) -java.nio.channels.GatheringByteChannel#write(java.nio.ByteBuffer[], int, int) -java.nio.channels.GatheringByteChannel#write(java.nio.ByteBuffer[]) -java.nio.channels.ReadableByteChannel#read(java.nio.ByteBuffer) -java.nio.channels.ScatteringByteChannel#read(java.nio.ByteBuffer[]) -java.nio.channels.ScatteringByteChannel#read(java.nio.ByteBuffer[], int, int) -java.nio.channels.FileChannel#read(java.nio.ByteBuffer, long) - -@defaultMessage Convert to URI -java.net.URL#getPath() -java.net.URL#getFile() - -@defaultMessage Use java.nio.file instead of java.io.File API -# java.util.jar.JarFile -# java.util.zip.ZipFile -# java.io.File -# java.io.FileInputStream -# java.io.FileOutputStream -java.io.PrintStream#(java.lang.String,java.lang.String) -java.io.PrintWriter#(java.lang.String,java.lang.String) -java.util.Formatter#(java.lang.String,java.lang.String,java.util.Locale) -java.io.RandomAccessFile -# java.nio.file.Path#toFile() - -@defaultMessage Specify a location for the temp file/directory instead. -java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[]) -java.nio.file.Files#createTempFile(java.lang.String,java.lang.String,java.nio.file.attribute.FileAttribute[]) - -com.google.common.collect.Iterators#emptyIterator() @ Use Collections.emptyIterator instead - -@defaultMessage Don't use java serialization - this can break BWC without noticing it -java.io.ObjectOutputStream -java.io.ObjectOutput -java.io.ObjectInputStream -java.io.ObjectInput - -@defaultMessage avoid DNS lookups by accident: if you have a valid reason, then @SuppressWarnings with that reason so its completely clear -java.net.InetAddress#getHostName() -java.net.InetAddress#getCanonicalHostName() - -java.net.InetSocketAddress#getHostName() @ Use getHostString() instead, which avoids a DNS lookup - -@defaultMessage Avoid unchecked warnings by using Collections#empty(List|Map|Set) methods -java.util.Collections#EMPTY_LIST -java.util.Collections#EMPTY_MAP -java.util.Collections#EMPTY_SET - -java.util.Collections#shuffle(java.util.List) @ Use java.util.Collections#shuffle(java.util.List, java.util.Random) with a reproducible source of randomness - -edu.emory.mathcs.backport.java.util.** @ Use java.util.* - -com.google.common.base.Charsets @ Use java.nio.charset.StandardCharsets \ No newline at end of file diff --git a/config/pmd-rules.xml b/config/pmd-rules.xml new file mode 100644 index 000000000000..991971e55a01 --- /dev/null +++ b/config/pmd-rules.xml @@ -0,0 +1,31 @@ + + + + + Custom Graylog-specific rules + + + + + Prohibit usage of com.mongodb.DuplicateKeyException + + 1 + + + + + + + + + + diff --git a/data-node/LICENSE b/data-node/LICENSE new file mode 100644 index 000000000000..4e1383df1ee4 --- /dev/null +++ b/data-node/LICENSE @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. 
+ + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. 
For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. + + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. + + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. + + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. 
Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. 
For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. 
You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. + + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. 
If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/data-node/README.md b/data-node/README.md new file mode 100644 index 000000000000..b4dc66646fd9 --- /dev/null +++ b/data-node/README.md @@ -0,0 +1 @@ +First version, additional info to follow. diff --git a/data-node/UPGRADING.md b/data-node/UPGRADING.md new file mode 100644 index 000000000000..2478eac9820a --- /dev/null +++ b/data-node/UPGRADING.md @@ -0,0 +1 @@ +First version, no upgrading info yet. 
diff --git a/data-node/bin/graylog-datanode.sh b/data-node/bin/graylog-datanode.sh new file mode 100755 index 000000000000..5ac82c180d48 --- /dev/null +++ b/data-node/bin/graylog-datanode.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +set -eo pipefail + +# Resolve links - $0 may be a softlink +DATANODE_BIN="$0" + +while [ -h "$DATANODE_BIN" ]; do + ls=$(ls -ld "$DATANODE_BIN") + link=$(expr "$ls" : '.*-> \(.*\)$') + if expr "$link" : '/.*' > /dev/null; then + DATANODE_BIN="$link" + else + DATANODE_BIN=$(dirname "$DATANODE_BIN")/"$link" + fi +done + +DATANODE_ROOT="$(dirname "$(dirname "$DATANODE_BIN")")" +DATANODE_DEFAULT_JAR="${DATANODE_ROOT}/graylog-datanode.jar" +DATANODE_JVM_OPTIONS_FILE="${DATANODE_JVM_OPTIONS_FILE:-"$DATANODE_ROOT/config/jvm.options"}" +DATANODE_LOG4J_CONFIG_FILE="${DATANODE_LOG4J_CONFIG_FILE:-"$DATANODE_ROOT/config/log4j2.xml"}" + +DATANODE_PARSED_JAVA_OPTS="" +if [ -f "$DATANODE_JVM_OPTIONS_FILE" ]; then + DATANODE_PARSED_JAVA_OPTS=$(grep '^-' "$DATANODE_JVM_OPTIONS_FILE" | tr '\n' ' ') +fi + +DATANODE_JAVA_OPTS="-Dlog4j.configurationFile=file://$(readlink -f "${DATANODE_LOG4J_CONFIG_FILE}") ${DATANODE_PARSED_JAVA_OPTS% } $JAVA_OPTS" +DATANODE_JAR=${DATANODE_JAR:="$DATANODE_DEFAULT_JAR"} + +JAVA_CMD="${JAVA_CMD}" + +if [ -z "$JAVA_CMD" ]; then + if [ -d "$DATANODE_ROOT/jvm" ]; then + JAVA_HOME="$DATANODE_ROOT/jvm" + else + echo "ERROR: Java is not installed." + exit 1 + fi +fi + +if [ -n "$JAVA_HOME" ]; then + java_cmd="${JAVA_HOME}/bin/java" + + if [ -x "$java_cmd" ]; then + JAVA_CMD="$java_cmd" + else + echo "$java_cmd not executable or doesn't exist" + exit 1 + fi +fi + +exec "$JAVA_CMD" ${DATANODE_JAVA_OPTS% } -jar "$DATANODE_JAR" "$@" diff --git a/data-node/config/jvm.options b/data-node/config/jvm.options new file mode 100644 index 000000000000..974f48ea916d --- /dev/null +++ b/data-node/config/jvm.options @@ -0,0 +1,20 @@ +# JVM Heap Settings +# +# It's recommended to set both values to the same size to avoid stop-the-world +# GC pauses during resize. +-Xms1g +-Xmx1g + +# General JVM Settings +# +# Garbage collector +-XX:+UseG1GC + +# Ensure full stack traces for debugging +-XX:-OmitStackTraceInFastThrow + +# Allow usage of experimental JVM options +-XX:+UnlockExperimentalVMOptions + +# Avoid endless loop with some TLSv1.3 implementations +-Djdk.tls.acknowledgeCloseNotify=true diff --git a/data-node/config/log4j2.xml b/data-node/config/log4j2.xml new file mode 100644 index 000000000000..41abc7653ae8 --- /dev/null +++ b/data-node/config/log4j2.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/data-node/migration/Manual-Migration.md b/data-node/migration/Manual-Migration.md new file mode 100644 index 000000000000..e5c87052f5b9 --- /dev/null +++ b/data-node/migration/Manual-Migration.md @@ -0,0 +1,174 @@ +# Manual Migration Guide + +**Caveat**: preliminary - and only to have a minimal basis to find out which modifications are necessary at the current codebase +and to research/test possible improvements for a better user experience or find out the limits what we can actually do. +Some of these steps could be useful in the future for PSO or support to migrate manually or fix problems that occurred during migration. + +**Also: if you try to use this guide for a manual migration of a real prod system, all the preliniaries regarding certificates etc. probably +don't match your use case. 
Please adjust accordingly!**
+
+#### Testing during development
+
+Create/include a locally generated Docker image for Graylog or the DataNode by using the `create-docker-images.sh` script.
+
+At the time of writing, if you test/develop on macOS and you're struggling with setting `vm.max_map_count` for Docker,
+you're probably running the latest Docker version, which is incompatible with macOS Sonoma. Your only option is to downgrade Docker.
+
+## Things to watch out for
+The following is a list of items that I did not spend much effort/time testing but that should be kept in mind to avoid potential issues:
+- keep hostnames as they are
+- keep the cluster name
+
+Otherwise you might create issues with existing metadata in OpenSearch.
+
+
+In a lot of cases, if you miss a step or hit an error, you have no other choice than to start over completely, which might not be
+possible in production environments. So make sure you have backups ;-)
+
+## Migrating an existing OpenSearch 2.x or 1.3.x cluster
+
+This is a preliminary guide to migrate an existing OpenSearch 2.x or 1.3.x cluster into a DataNode cluster.
+The basis was the `cluster` setup from our `docker compose` GitHub repository, but we only use one MongoDB and one Graylog node.
+
+It is based on Docker to be able to reproduce all the steps consistently. Real installations will surely
+differ. The steps can also be used to migrate an installation from OS packages.
+
+## Migrating Elasticsearch 7.10
+
+See the additional info at the end of this document.
+
+### Contents of this directory:
+
+- `docker-compose.yml` - the Docker compose file we're using and modifying in between steps
+- `env` - the environment variables read by `docker-compose.yml` for hashes and the admin password
+- `cert.sh` - a script to create certificates for the OpenSearch cluster
+- `custom-opensearch.yml` - certificate related OpenSearch configuration
+- `opensearch-security-config.yml` - the OpenSearch security config that we're going to replace during a migration
+- `datanode-security-config.yml` - the default DataNode security config that we'll use as a replacement
+
+### Migration Steps
+
+You should be able to always start over by doing a `docker compose down -v` and reverting any modifications to the files.
+
+#### The file `cert.sh`
+Use the `cert.sh` script to generate the certificates for the OpenSearch cluster. Use "password" as the password
+for everything certificate related so that it matches the other scripts/files.
+
+For a cluster that uses no certificates, don't create certificates. Remove the following lines
+from each OpenSearch service in `docker-compose.yml`:
+
+```
+- "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem"
+- "./node?.pem:/usr/share/opensearch/config/node.pem"
+- "./node?-key.pem:/usr/share/opensearch/config/node-key.pem"
+- "./admin.pem:/usr/share/opensearch/config/admin.pem"
+- "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem"
+- "./keystore.jks:/usr/share/opensearch/config/keystore.jks"
+- "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml"
+```
+Replace the `?` with 1-3 depending on the service.
+
+And remove the cert config from the graylog service by deleting the following lines:
+
+```
+ GRAYLOG_CA_KEYSTORE_FILE: "/usr/share/graylog/data/keystore.jks"
+ GRAYLOG_CA_PASSWORD: "password"
+```
+and
+```
+ - "./keystore.jks:/usr/share/graylog/data/keystore.jks"
+```
+
+#### The file `env`
+Modify the `env` file as with the regular Docker Compose examples that we provide. Rename it to `.env`.
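+
+For example, assuming `pwgen` and `sha256sum` are available on your machine (this is just a sketch, any equivalent tools work), the two values can be generated like this:
+
+```
+# generate a random 96 character password secret for GRAYLOG_PASSWORD_SECRET
+pwgen -N 1 -s 96
+
+# hash the admin password you want to use and print the value for GRAYLOG_ROOT_PASSWORD_SHA2
+echo -n yourpassword | sha256sum
+```
+
+Put the resulting values into `GRAYLOG_PASSWORD_SECRET` and `GRAYLOG_ROOT_PASSWORD_SHA2` in your `.env` file. For the
+`sha256sum` output, use only the hash itself, without the trailing `-`.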
+
+
+
+#### The file `datanode-security-config.yml`
+Convert the `GRAYLOG_PASSWORD_SECRET` to base64, e.g. by running `echo "The password secret you chose" | base64`, and
+put it into line 131 of `datanode-security-config.yml`.
+
+### OpenSearch 1.3.x
+
+To run this with OpenSearch 1.3.x, replace the Docker image tag `2.10.0` with `1.3.1` for all three services.
+
+### Create your containers
+
+`docker compose create`
+
+### Start Graylog with OpenSearch
+
+Start your cluster with `docker compose up -d mongodb opensearch1 opensearch2 opensearch3 graylog1`. Create an input,
+ingest some data, etc.
+
+Stop OpenSearch and Graylog with `docker compose stop graylog1 opensearch1 opensearch2 opensearch3`.
+
+Modify all three OpenSearch services in `docker-compose.yml` to look like this:
+```
+# - "./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml"
+ - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml"
+```
+
+So basically, switch out the default security config that uses a user/role based model for one that uses JWT authentication,
+which is the mechanism we use with the DataNode.
+
+Bring the OpenSearch cluster back up:
+
+`docker compose create opensearch1 opensearch2 opensearch3`
+
+`docker compose up -d opensearch1 opensearch2 opensearch3`
+
+You can check the logs or test via `curl` that OpenSearch is back.
+
+Use `docker ps` to get the ID of one of the nodes, then `docker exec -it <container-id> bash` into the container.
+Run `cd /usr/share/opensearch/plugins/opensearch-security/tools` and `./securityadmin.sh -f /usr/share/opensearch/config/opensearch-security/config.yml -icl -h opensearch1 -nhnv -cacert ../../../config/root-ca.pem -cert ../../../config/admin.pem -key ../../../config/admin-key.pem`
+to make OpenSearch reload the security data.
+
+This step is necessary so that the OpenSearch data directory with the security indices etc. contains the JWT auth data before we attach the data directory to the DataNode.
+
+Stop OpenSearch using `docker compose stop opensearch1 opensearch2 opensearch3`.
+
+Modify your graylog service so that it connects to the DataNodes and shows the Preflight config; it should look like this:
+
+```
+# GRAYLOG_CA_KEYSTORE_FILE: "/usr/share/graylog/data/keystore.jks"
+# GRAYLOG_CA_PASSWORD: "password"
+# GRAYLOG_ELASTICSEARCH_HOSTS: "https://admin:admin@opensearch1:9200,https://admin:admin@opensearch2:9200,https://admin:admin@opensearch3:9200"
+ GRAYLOG_ENABLE_PREFLIGHT_WEB: "true"
+```
+
+Issue `docker compose create graylog1` and `docker compose up -d graylog1 datanode1 datanode2 datanode3`.
+Log into the Preflight UI at `http://localhost:9000` with the auth credentials from the end of the logs at `docker compose logs -f graylog1`.
+
+Provision certificates for the DataNodes; they should start up and turn green. Resume the Graylog startup.
+For now, nothing should happen - this will be addressed shortly in dev. Stop Graylog with `docker compose stop graylog1`.
+
+Disable the Preflight UI in `docker-compose.yml` by adding a `#`:
+```
+# GRAYLOG_ENABLE_PREFLIGHT_WEB: "true"
+```
+Use `docker compose create graylog1` and `docker compose up -d graylog1` to bring Graylog up again.
+
+Now you should be able to log into Graylog at `http://localhost:9000` with your regular credentials and have a running
+configuration with DataNodes.
+
+
+## Migrating Elasticsearch 7.10
+
+Migration from Elasticsearch 7.10 needs an additional step. ES 7.10 does not understand JWT authentication.
+So you have to first migrate to OpenSearch before running the update of the security information.
+Look at the supplied `es710-docker-compose.yml` as an example. Please note that except for the service name, I changed the cluster name
+and hostnames etc. to `opensearch`. In a regular setting, it would be the other way around and you would have to pull the
+Elasticsearch names through the whole process into the DataNode.
+
+Start the Elasticsearch cluster with `docker compose up -d elasticsearch1 elasticsearch2 elasticsearch3`, add some data, etc.,
+then stop it again with `docker compose stop elasticsearch1 elasticsearch2 elasticsearch3`.
+
+Start the OpenSearch cluster in place of the Elasticsearch cluster; it points to the same data directory:
+`docker compose up -d opensearch1 opensearch2 opensearch3`. The `es710-docker-compose.yml` already points to the
+security config with the JWT auth settings. Make sure you added the correct base64 encoded secret.
+
+Run `securityadmin.sh` as described above and then just follow the steps for an OpenSearch 1.3 migration.
+
+Please also note that the Elasticsearch example does not contain any certificates for Elasticsearch but uses the generated certificates
+once you start the OpenSearch cluster.
diff --git a/data-node/migration/cert.sh b/data-node/migration/cert.sh new file mode 100644 index 000000000000..295f357dee70 --- /dev/null +++ b/data-node/migration/cert.sh @@ -0,0 +1,56 @@
+#!/bin/sh
+# Root CA
+openssl genrsa -out root-ca-key.pem 2048
+openssl req -new -x509 -sha256 -key root-ca-key.pem -subj "/C=CA/ST=ONTARIO/L=TORONTO/O=ORG/OU=UNIT/CN=root" -out root-ca.pem -days 730
+# Admin cert
+openssl genrsa -out admin-key-temp.pem 2048
+openssl pkcs8 -inform PEM -outform PEM -in admin-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out admin-key.pem
+openssl req -new -key admin-key.pem -subj "/C=CA/ST=ONTARIO/L=TORONTO/O=ORG/OU=UNIT/CN=A" -out admin.csr
+openssl x509 -req -in admin.csr -CA root-ca.pem -CAkey root-ca-key.pem -CAcreateserial -sha256 -out admin.pem -days 730
+# Node cert 1
+openssl genrsa -out node1-key-temp.pem 2048
+openssl pkcs8 -inform PEM -outform PEM -in node1-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out node1-key.pem
+openssl req -new -key node1-key.pem -subj "/C=CA/ST=ONTARIO/L=TORONTO/O=ORG/OU=UNIT/CN=opensearch1" -out node1.csr
+echo 'subjectAltName=DNS:opensearch1' > node1.ext
+openssl x509 -req -in node1.csr -CA root-ca.pem -CAkey root-ca-key.pem -CAcreateserial -sha256 -out node1.pem -days 730 -extfile node1.ext
+# Node cert 2
+openssl genrsa -out node2-key-temp.pem 2048
+openssl pkcs8 -inform PEM -outform PEM -in node2-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out node2-key.pem
+openssl req -new -key node2-key.pem -subj "/C=CA/ST=ONTARIO/L=TORONTO/O=ORG/OU=UNIT/CN=opensearch2" -out node2.csr
+echo 'subjectAltName=DNS:opensearch2' > node2.ext
+openssl x509 -req -in node2.csr -CA root-ca.pem -CAkey root-ca-key.pem -CAcreateserial -sha256 -out node2.pem -days 730 -extfile node2.ext
+# Node cert 3
+openssl genrsa -out node3-key-temp.pem 2048
+openssl pkcs8 -inform PEM -outform PEM -in node3-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out node3-key.pem
+openssl req -new -key node3-key.pem -subj "/C=CA/ST=ONTARIO/L=TORONTO/O=ORG/OU=UNIT/CN=opensearch3" -out node3.csr
+echo 'subjectAltName=DNS:opensearch3' > node3.ext
+openssl x509 -req -in node3.csr -CA root-ca.pem -CAkey root-ca-key.pem -CAcreateserial -sha256 -out node3.pem -days 730 -extfile node3.ext
+# Client cert
+#openssl genrsa -out client-key-temp.pem 2048
+#openssl pkcs8 -inform PEM -outform PEM -in client-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out client-key.pem +#openssl req -new -key client-key.pem -subj "/C=CA/ST=ONTARIO/L=TORONTO/O=ORG/OU=UNIT/CN=client.dns.a-record" -out client.csr +#echo 'subjectAltName=DNS:client.dns.a-record' > client.ext +#openssl x509 -req -in client.csr -CA root-ca.pem -CAkey root-ca-key.pem -CAcreateserial -sha256 -out client.pem -days 730 -extfile client.ext +# Cleanup +rm admin-key-temp.pem +rm admin.csr +rm node1-key-temp.pem +rm node1.csr +rm node1.ext +rm node2-key-temp.pem +rm node2.csr +rm node2.ext +rm node3-key-temp.pem +rm node3.csr +rm node3.ext +#rm client-key-temp.pem +#rm client.csr +#rm client.ext + + +keytool -import -trustcacerts -alias opensearch1 -file node1.pem -keystore keystore.jks +keytool -import -trustcacerts -alias opensearch2 -file node2.pem -keystore keystore.jks +keytool -import -trustcacerts -alias opensearch3 -file node3.pem -keystore keystore.jks +keytool -import -trustcacerts -alias admin -file admin.pem -keystore keystore.jks +keytool -import -trustcacerts -alias root -file root-ca.pem -keystore keystore.jks + diff --git a/data-node/migration/create-docker-images.sh b/data-node/migration/create-docker-images.sh new file mode 100755 index 000000000000..34d97c2fc352 --- /dev/null +++ b/data-node/migration/create-docker-images.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# script to build local docker images for graylog and the datanode +# +# put this script in the root dir where you have graylog checked out, also check out the graylog-docker repo in the same dir. +# you should have the following structure: +# +# ls [enter] +# create-docker-images.sh +# graylog-docker +# graylog-project-internal +# graylog-project-repos +# +# run the script after a "mvn -DskipTests clean package" to build the images +# +# put the following lines in your .env file to reference locally created docker images +# DATANODE_IMAGE=graylog/graylog-datanode:local +# GRAYLOG_IMAGE=graylog/graylog:local +# + +cd graylog-docker + +cp `ls -1 ../graylog-project-internal/target/artifacts/graylog-datanode/graylog-datanode-6.0.0-SNAPSHOT-*-linux-x64.tar.gz|tail -1` . +docker build -t graylog/graylog-datanode:local --build-arg JAVA_VERSION_MAJOR=17 --build-arg LOCAL_BUILD_TGZ=`ls -1 graylog-datanode-6.0.0-SNAPSHOT-*-linux-x64.tar.gz|tail -1` -f docker/datanode/Dockerfile . +rm `ls -1 graylog-datanode-6.0.0-SNAPSHOT-*-linux-x64.tar.gz|tail -1` + +cp `ls -1 ../graylog-project-internal/target/artifacts/graylog-enterprise/graylog-enterprise-6.0.0-SNAPSHOT-*-linux-x64.tar.gz|tail -1` . +docker build -t graylog/graylog:local --build-arg JAVA_VERSION_MAJOR=17 --build-arg LOCAL_BUILD_TGZ=`ls -1 graylog-enterprise-6.0.0-SNAPSHOT-*-linux-x64.tar.gz|tail -1` -f docker/enterprise/Dockerfile . +rm `ls -1 graylog-enterprise-6.0.0-SNAPSHOT-*-linux-x64.tar.gz|tail -1` + +cd .. 
diff --git a/data-node/migration/custom-opensearch.yml b/data-node/migration/custom-opensearch.yml new file mode 100644 index 000000000000..23f9c9c87c60 --- /dev/null +++ b/data-node/migration/custom-opensearch.yml @@ -0,0 +1,30 @@ +plugins.security.allow_unsafe_democertificates: true +plugins.security.ssl.transport.pemcert_filepath: node.pem +plugins.security.ssl.transport.pemkey_filepath: node-key.pem +plugins.security.ssl.transport.pemtrustedcas_filepath: root-ca.pem +plugins.security.ssl.transport.enforce_hostname_verification: false +plugins.security.ssl.transport.resolve_hostname: false +plugins.security.ssl.http.enabled: true +plugins.security.ssl.http.pemcert_filepath: node.pem +plugins.security.ssl.http.pemkey_filepath: node-key.pem +plugins.security.ssl.http.pemtrustedcas_filepath: root-ca.pem +plugins.security.authcz.admin_dn: + - 'CN=A,OU=UNIT,O=ORG,L=TORONTO,ST=ONTARIO,C=CA' +plugins.security.nodes_dn: + - 'CN=opensearch1,OU=UNIT,O=ORG,L=TORONTO,ST=ONTARIO,C=CA' + - 'CN=opensearch2,OU=UNIT,O=ORG,L=TORONTO,ST=ONTARIO,C=CA' + - 'CN=opensearch3,OU=UNIT,O=ORG,L=TORONTO,ST=ONTARIO,C=CA' +#plugins.security.audit.type: internal_opensearch +plugins.security.allow_default_init_securityindex: true +plugins.security.system_indices.enabled: true + +plugins.security.ssl.transport.truststore_type: JKS +plugins.security.ssl.transport.truststore_filepath: keystore.jks +plugins.security.ssl.transport.truststore_password: password + +plugins.security.ssl.http.truststore_type: JKS +plugins.security.ssl.http.truststore_filepath: keystore.jks +plugins.security.ssl.http.truststore_password: password + +logger.org.opensearch: debug + diff --git a/data-node/migration/datanode-security-config.yml b/data-node/migration/datanode-security-config.yml new file mode 100644 index 000000000000..daf87ed876d0 --- /dev/null +++ b/data-node/migration/datanode-security-config.yml @@ -0,0 +1,247 @@ +--- + +# This is the main OpenSearch Security configuration file where authentication +# and authorization is defined. +# +# You need to configure at least one authentication domain in the authc of this file. +# An authentication domain is responsible for extracting the user credentials from +# the request and for validating them against an authentication backend like Active Directory for example. +# +# If more than one authentication domain is configured the first one which succeeds wins. +# If all authentication domains fail then the request is unauthenticated. +# In this case an exception is thrown and/or the HTTP status is set to 401. +# +# After authentication authorization (authz) will be applied. There can be zero or more authorizers which collect +# the roles from a given backend for the authenticated user. +# +# Both, authc and auth can be enabled/disabled separately for REST and TRANSPORT layer. Default is true for both. +# http_enabled: true +# transport_enabled: true +# +# For HTTP it is possible to allow anonymous authentication. If that is the case then the HTTP authenticators try to +# find user credentials in the HTTP request. If credentials are found then the user gets regularly authenticated. +# If none can be found the user will be authenticated as an "anonymous" user. This user has always the username "anonymous" +# and one role named "anonymous_backendrole". +# If you enable anonymous authentication all HTTP authenticators will not challenge. 
+# +# +# Note: If you define more than one HTTP authenticators make sure to put non-challenging authenticators like "proxy" or "clientcert" +# first and the challenging one last. +# Because it's not possible to challenge a client with two different authentication methods (for example +# Kerberos and Basic) only one can have the challenge flag set to true. You can cope with this situation +# by using pre-authentication, e.g. sending a HTTP Basic authentication header in the request. +# +# Default value of the challenge flag is true. +# +# +# HTTP +# basic (challenging) +# proxy (not challenging, needs xff) +# kerberos (challenging) +# clientcert (not challenging, needs https) +# jwt (not challenging) +# host (not challenging) #DEPRECATED, will be removed in a future version. +# host based authentication is configurable in roles_mapping + +# Authc +# internal +# noop +# ldap + +# Authz +# ldap +# noop + + + +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + # Set filtered_alias_mode to 'disallow' to forbid more than 2 filtered aliases per index + # Set filtered_alias_mode to 'warn' to allow more than 2 filtered aliases per index but warns about it (default) + # Set filtered_alias_mode to 'nowarn' to allow more than 2 filtered aliases per index silently + #filtered_alias_mode: warn + #do_not_fail_on_forbidden: false + #kibana: + # Kibana multitenancy + #multitenancy_enabled: true + #server_username: kibanaserver + #index: '.kibana' + http: + anonymous_auth_enabled: false + xff: + enabled: false + internalProxies: '192\.168\.0\.10|192\.168\.0\.11' # regex pattern + #internalProxies: '.*' # trust all internal proxies, regex pattern + #remoteIpHeader: 'x-forwarded-for' + ###### see https://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html for regex help + ###### more information about XFF https://en.wikipedia.org/wiki/X-Forwarded-For + ###### and here https://tools.ietf.org/html/rfc7239 + ###### and https://tomcat.apache.org/tomcat-8.0-doc/config/valve.html#Remote_IP_Valve + authc: + kerberos_auth_domain: + http_enabled: false + transport_enabled: false + order: 6 + http_authenticator: + type: kerberos + challenge: true + config: + # If true a lot of kerberos/security related debugging output will be logged to standard out + krb_debug: false + # If true then the realm will be stripped from the user name + strip_realm_from_principal: true + authentication_backend: + type: noop + basic_internal_auth_domain: + description: "Authenticate via HTTP Basic against internal users database" + http_enabled: false + transport_enabled: false + order: 4 + http_authenticator: + type: basic + challenge: true + authentication_backend: + type: intern + proxy_auth_domain: + description: "Authenticate via proxy" + http_enabled: false + transport_enabled: false + order: 3 + http_authenticator: + type: proxy + challenge: false + config: + user_header: "x-proxy-user" + roles_header: "x-proxy-roles" + authentication_backend: + type: noop + jwt_auth_domain: + description: "Authenticate via Json Web Token" + http_enabled: true + transport_enabled: true + order: 0 + http_authenticator: + type: jwt + challenge: false + config: + signing_key: "base64 encoded GRAYLOG_PASSWORD_SECRET from .env file" + jwt_header: "Authorization" + jwt_url_parameter: null + roles_key: "os_roles" + subject_key: null + authentication_backend: + type: noop + clientcert_auth_domain: + description: "Authenticate via SSL client certificates" + http_enabled: false + transport_enabled: false + order: 2 + 
http_authenticator: + type: clientcert + config: + username_attribute: cn #optional, if omitted DN becomes username + challenge: false + authentication_backend: + type: noop + ldap: + description: "Authenticate via LDAP or Active Directory" + http_enabled: false + transport_enabled: false + order: 5 + http_authenticator: + type: basic + challenge: false + authentication_backend: + # LDAP authentication backend (authenticate users against a LDAP or Active Directory) + type: ldap + config: + # enable ldaps + enable_ssl: false + # enable start tls, enable_ssl should be false + enable_start_tls: false + # send client certificate + enable_ssl_client_auth: false + # verify ldap hostname + verify_hostnames: true + hosts: + - localhost:8389 + bind_dn: null + password: null + userbase: 'ou=people,dc=example,dc=com' + # Filter to search for users (currently in the whole subtree beneath userbase) + # {0} is substituted with the username + usersearch: '(sAMAccountName={0})' + # Use this attribute from the user as username (if not set then DN is used) + username_attribute: null + authz: + roles_from_myldap: + description: "Authorize via LDAP or Active Directory" + http_enabled: false + transport_enabled: false + authorization_backend: + # LDAP authorization backend (gather roles from a LDAP or Active Directory, you have to configure the above LDAP authentication backend settings too) + type: ldap + config: + # enable ldaps + enable_ssl: false + # enable start tls, enable_ssl should be false + enable_start_tls: false + # send client certificate + enable_ssl_client_auth: false + # verify ldap hostname + verify_hostnames: true + hosts: + - localhost:8389 + bind_dn: null + password: null + rolebase: 'ou=groups,dc=example,dc=com' + # Filter to search for roles (currently in the whole subtree beneath rolebase) + # {0} is substituted with the DN of the user + # {1} is substituted with the username + # {2} is substituted with an attribute value from user's directory entry, of the authenticated user. Use userroleattribute to specify the name of the attribute + rolesearch: '(member={0})' + # Specify the name of the attribute which value should be substituted with {2} above + userroleattribute: null + # Roles as an attribute of the user entry + userrolename: disabled + #userrolename: memberOf + # The attribute in a role entry containing the name of that role, Default is "name". + # Can also be "dn" to use the full DN as rolename. + rolename: cn + # Resolve nested roles transitive (roles which are members of other roles and so on ...) + resolve_nested_roles: true + userbase: 'ou=people,dc=example,dc=com' + # Filter to search for users (currently in the whole subtree beneath userbase) + # {0} is substituted with the username + usersearch: '(uid={0})' + # Skip users matching a user name, a wildcard or a regex pattern + #skip_users: + # - 'cn=Michael Jackson,ou*people,o=TEST' + # - '/\S*/' + roles_from_another_ldap: + description: "Authorize via another Active Directory" + http_enabled: false + transport_enabled: false + authorization_backend: + type: ldap + #config goes here ... 
+ # auth_failure_listeners: + # ip_rate_limiting: + # type: ip + # allowed_tries: 10 + # time_window_seconds: 3600 + # block_expiry_seconds: 600 + # max_blocked_clients: 100000 + # max_tracked_clients: 100000 + # internal_authentication_backend_limiting: + # type: username + # authentication_backend: intern + # allowed_tries: 10 + # time_window_seconds: 3600 + # block_expiry_seconds: 600 + # max_blocked_clients: 100000 + # max_tracked_clients: 100000 diff --git a/data-node/migration/docker-compose.yml b/data-node/migration/docker-compose.yml new file mode 100644 index 000000000000..8992cddbe44f --- /dev/null +++ b/data-node/migration/docker-compose.yml @@ -0,0 +1,234 @@ +# based on files in: +# https://github.com/Graylog2/docker-compose +# https://opensearch.org/docs/latest/security/configuration/generate-certificates/ +# see additional cert.sh for certificate creation and custom-opensearch.yml for opensearch configuration + +version: "3.8" + +services: + mongodb: + hostname: "mongodb" + image: "mongo:5.0" + ports: + - "27017:27017" + volumes: + - "mongodb-data:/data/db" + + opensearch1: + image: "opensearchproject/opensearch:2.10.0" + hostname: "opensearch1" + environment: + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + - "network.host=opensearch1" + - "node.name=opensearch1" + - "cluster.name=datanode-cluster" + - "network.bind_host=_site_" + - "discovery.seed_hosts=opensearch2,opensearch3" + - "cluster.initial_master_nodes=opensearch1,opensearch2,opensearch3" + - "bootstrap.memory_lock=true" + - "action.auto_create_index=false" + - "DISABLE_INSTALL_DEMO_CONFIG=true" + - "plugins.security.allow_unsafe_democertificates=true" + # - "plugins.security.ssl.transport.enforce_hostname_verification=false" + # - "plugins.security.ssl.transport.resolve_hostname=false" + ports: + - "9200:9200" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + volumes: + - "opensearch-data-01:/usr/share/opensearch/data" + - "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem" + - "./node1.pem:/usr/share/opensearch/config/node.pem" + - "./node1-key.pem:/usr/share/opensearch/config/node-key.pem" + - "./admin.pem:/usr/share/opensearch/config/admin.pem" + - "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem" + - "./keystore.jks:/usr/share/opensearch/config/keystore.jks" + - "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml" + - "./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" +# - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + + opensearch2: + image: "opensearchproject/opensearch:2.10.0" + hostname: "opensearch2" + environment: + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + - "network.host=opensearch2" + - "node.name=opensearch2" + - "cluster.name=datanode-cluster" + - "network.bind_host=_site_" + - "discovery.seed_hosts=opensearch1,opensearch3" + - "cluster.initial_master_nodes=opensearch1,opensearch2,opensearch3" + - "bootstrap.memory_lock=true" + - "action.auto_create_index=false" + - "DISABLE_INSTALL_DEMO_CONFIG=true" + - "plugins.security.allow_unsafe_democertificates=true" + # - "plugins.security.ssl.transport.enforce_hostname_verification=false" + # - "plugins.security.ssl.transport.resolve_hostname=false" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + volumes: + - "opensearch-data-02:/usr/share/opensearch/data" + - "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem" + - 
"./node2.pem:/usr/share/opensearch/config/node.pem" + - "./node2-key.pem:/usr/share/opensearch/config/node-key.pem" + - "./admin.pem:/usr/share/opensearch/config/admin.pem" + - "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem" + - "./keystore.jks:/usr/share/opensearch/config/keystore.jks" + - "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml" + - "./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" +# - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + + opensearch3: + image: "opensearchproject/opensearch:2.10.0" + hostname: "opensearch3" + environment: + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + - "network.host=opensearch3" + - "node.name=opensearch3" + - "cluster.name=datanode-cluster" + - "network.bind_host=_site_" + - "discovery.seed_hosts=opensearch1,opensearch2" + - "cluster.initial_master_nodes=opensearch1,opensearch2,opensearch3" + - "bootstrap.memory_lock=true" + - "action.auto_create_index=false" + - "DISABLE_INSTALL_DEMO_CONFIG=true" + - "plugins.security.allow_unsafe_democertificates=true" + # - "plugins.security.ssl.transport.enforce_hostname_verification=false" + # - "plugins.security.ssl.transport.resolve_hostname=false" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + volumes: + - "opensearch-data-03:/usr/share/opensearch/data" + - "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem" + - "./node3.pem:/usr/share/opensearch/config/node.pem" + - "./node3-key.pem:/usr/share/opensearch/config/node-key.pem" + - "./admin.pem:/usr/share/opensearch/config/admin.pem" + - "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem" + - "./keystore.jks:/usr/share/opensearch/config/keystore.jks" + - "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml" + - "./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" +# - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + + datanode1: + image: "${DATANODE_IMAGE:-graylog/graylog-datanode:5.2.0}" + hostname: opensearch1 + depends_on: + - "mongodb" + environment: + GRAYLOG_DATANODE_NODE_ID_FILE: "/var/lib/graylog-datanode/node-id" + GRAYLOG_DATANODE_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_DATANODE_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_DATANODE_MONGODB_URI: "mongodb://mongodb:27017/graylog" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + ports: + - "8999:8999/tcp" # DataNode API + - "9200:9200" + volumes: + - "graylog-datanode-01:/var/lib/graylog-datanode" + - "opensearch-data-01:/var/lib/graylog-datanode/opensearch/data" + + datanode2: + image: "${DATANODE_IMAGE:-graylog/graylog-datanode:5.2.0}" + hostname: opensearch2 + depends_on: + - "mongodb" + environment: + GRAYLOG_DATANODE_NODE_ID_FILE: "/var/lib/graylog-datanode/node-id" + GRAYLOG_DATANODE_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_DATANODE_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_DATANODE_MONGODB_URI: "mongodb://mongodb:27017/graylog" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + ports: + - "8998:8999/tcp" # DataNode API + volumes: + - 
"graylog-datanode-02:/var/lib/graylog-datanode" + - "opensearch-data-02:/var/lib/graylog-datanode/opensearch/data" + + datanode3: + image: "${DATANODE_IMAGE:-graylog/graylog-datanode:5.2.0}" + hostname: opensearch3 + depends_on: + - "mongodb" + environment: + GRAYLOG_DATANODE_NODE_ID_FILE: "/var/lib/graylog-datanode/node-id" + GRAYLOG_DATANODE_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_DATANODE_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_DATANODE_MONGODB_URI: "mongodb://mongodb:27017/graylog" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + ports: + - "8997:8999/tcp" # DataNode API + volumes: + - "graylog-datanode-03:/var/lib/graylog-datanode" + - "opensearch-data-03:/var/lib/graylog-datanode/opensearch/data" + + graylog1: + image: "${GRAYLOG_IMAGE:-graylog/graylog:5.2.0}" + hostname: "graylog1" + depends_on: + - "mongodb" + entrypoint: "/docker-entrypoint.sh" + environment: + GRAYLOG_NODE_ID_FILE: "/usr/share/graylog/data/data/node-id" + GRAYLOG_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_HTTP_EXTERNAL_URI: "http://127.0.0.1:9000/" + GRAYLOG_MONGODB_URI: "mongodb://mongodb:27017/graylog" + GRAYLOG_REPORT_DISABLE_SANDBOX: "true" + GRAYLOG_CA_KEYSTORE_FILE: "/usr/share/graylog/data/keystore.jks" + GRAYLOG_CA_PASSWORD: "password" + GRAYLOG_ELASTICSEARCH_HOSTS: "https://admin:admin@opensearch1:9200,https://admin:admin@opensearch2:9200,https://admin:admin@opensearch3:9200" +# GRAYLOG_ENABLE_PREFLIGHT_WEB: "true" + ports: + - "9000:9000" # Graylog web interface and REST API + - "1514:1514" # Syslog TCP + - "1514:1514/udp" # Syslog UDP + - "12201:12201" # GELF TCP + - "12201:12201/udp" # GELF UDP + volumes: + - "graylog-data-01:/usr/share/graylog/data/data" + - "graylog-journal-01:/usr/share/graylog/data/journal" + - "./keystore.jks:/usr/share/graylog/data/keystore.jks" + +volumes: + graylog-datanode-01: + graylog-datanode-02: + graylog-datanode-03: + graylog-data-01: + graylog-journal-01: + mongodb-data: + opensearch-data-01: + opensearch-data-02: + opensearch-data-03: diff --git a/data-node/migration/env b/data-node/migration/env new file mode 100644 index 000000000000..7a963cb0a334 --- /dev/null +++ b/data-node/migration/env @@ -0,0 +1,14 @@ +# You MUST set a secret to secure/pepper the stored user passwords here. Use at least 64 characters. +# Generate one by using for example: pwgen -N 1 -s 96 +# ATTENTION: This value must be the same on all Graylog nodes in the cluster. +# Changing this value after installation will render all user sessions and encrypted values in the database invalid. (e.g. encrypted access tokens) +GRAYLOG_PASSWORD_SECRET= + +# You MUST specify a hash password for the root user (which you only need to initially set up the +# system and in case you lose connectivity to your authentication backend) +# This password cannot be changed using the API or via the web interface. If you need to change it, +# modify it in this file. +# Create one by using for example: echo -n yourpassword | sha256sum +# and put the resulting hash value into the following line +# CHANGE THIS! 
+GRAYLOG_ROOT_PASSWORD_SHA2= diff --git a/data-node/migration/es710-docker-compose.yml b/data-node/migration/es710-docker-compose.yml new file mode 100644 index 000000000000..05c89644246f --- /dev/null +++ b/data-node/migration/es710-docker-compose.yml @@ -0,0 +1,287 @@ +# based on files in: +# https://github.com/Graylog2/docker-compose +# https://opensearch.org/docs/latest/security/configuration/generate-certificates/ +# see additional cert.sh for certificate creation and custom-opensearch.yml for opensearch configuration + +version: "3.8" + +services: + mongodb: + hostname: "mongodb" + image: "mongo:5.0" + ports: + - "27017:27017" + volumes: + - "mongodb-data:/data/db" + + elasticsearch1: + image: "docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2" + hostname: "opensearch1" + environment: + node.name: "opensearch1" + cluster.name: "datanode-cluster" + discovery.seed_hosts: "opensearch2,opensearch3" + cluster.initial_master_nodes: "opensearch1,opensearch2,opensearch3" + bootstrap.memory_lock: "true" + ES_JAVA_OPTS: "-Xms512m -Xmx512m -Dlog4j2.formatMsgNoLookups=true" + ports: + - "9200:9200" + ulimits: + memlock: + soft: -1 + hard: -1 + volumes: + - "opensearch-data-01:/usr/share/elasticsearch/data" + + elasticsearch2: + image: "docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2" + hostname: "opensearch2" + environment: + node.name: "opensearch2" + cluster.name: "datanode-cluster" + discovery.seed_hosts: "opensearch1,opensearch3" + cluster.initial_master_nodes: "opensearch1,opensearch2,opensearch3" + bootstrap.memory_lock: "true" + ES_JAVA_OPTS: "-Xms512m -Xmx512m -Dlog4j2.formatMsgNoLookups=true" + ulimits: + memlock: + soft: -1 + hard: -1 + volumes: + - "opensearch-data-02:/usr/share/elasticsearch/data" + + elasticsearch3: + image: "docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2" + hostname: "opensearch3" + environment: + node.name: "opensearch3" + cluster.name: "datanode-cluster" + discovery.seed_hosts: "opensearch1,opensearch2" + cluster.initial_master_nodes: "opensearch1,opensearch2,opensearch3" + bootstrap.memory_lock: "true" + ES_JAVA_OPTS: "-Xms512m -Xmx512m -Dlog4j2.formatMsgNoLookups=true" + ulimits: + memlock: + soft: -1 + hard: -1 + volumes: + - "opensearch-data-03:/usr/share/elasticsearch/data" + + opensearch1: + image: "opensearchproject/opensearch:1.3.1" + hostname: "opensearch1" + environment: + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + - "network.host=opensearch1" + - "node.name=opensearch1" + - "cluster.name=datanode-cluster" + - "network.bind_host=_site_" + - "discovery.seed_hosts=opensearch2,opensearch3" + - "cluster.initial_master_nodes=opensearch1,opensearch2,opensearch3" + - "bootstrap.memory_lock=true" + - "action.auto_create_index=false" + - "DISABLE_INSTALL_DEMO_CONFIG=true" + - "plugins.security.allow_unsafe_democertificates=true" + # - "plugins.security.ssl.transport.enforce_hostname_verification=false" + # - "plugins.security.ssl.transport.resolve_hostname=false" + ports: + - "9200:9200" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + volumes: + - "opensearch-data-01:/usr/share/opensearch/data" + - "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem" + - "./node1.pem:/usr/share/opensearch/config/node.pem" + - "./node1-key.pem:/usr/share/opensearch/config/node-key.pem" + - "./admin.pem:/usr/share/opensearch/config/admin.pem" + - "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem" + - "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml" +# - 
"./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + - "./keystore.jks:/usr/share/opensearch/config/keystore.jks" + + opensearch2: + image: "opensearchproject/opensearch:1.3.1" + hostname: "opensearch2" + environment: + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + - "network.host=opensearch2" + - "node.name=opensearch2" + - "cluster.name=datanode-cluster" + - "network.bind_host=_site_" + - "discovery.seed_hosts=opensearch1,opensearch3" + - "cluster.initial_master_nodes=opensearch1,opensearch2,opensearch3" + - "bootstrap.memory_lock=true" + - "action.auto_create_index=false" + - "DISABLE_INSTALL_DEMO_CONFIG=true" + - "plugins.security.allow_unsafe_democertificates=true" + # - "plugins.security.ssl.transport.enforce_hostname_verification=false" + # - "plugins.security.ssl.transport.resolve_hostname=false" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + volumes: + - "opensearch-data-02:/usr/share/opensearch/data" + - "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem" + - "./node2.pem:/usr/share/opensearch/config/node.pem" + - "./node2-key.pem:/usr/share/opensearch/config/node-key.pem" + - "./admin.pem:/usr/share/opensearch/config/admin.pem" + - "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem" + - "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml" +# - "./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + - "./keystore.jks:/usr/share/opensearch/config/keystore.jks" + + opensearch3: + image: "opensearchproject/opensearch:1.3.1" + hostname: "opensearch3" + environment: + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + - "network.host=opensearch3" + - "node.name=opensearch3" + - "cluster.name=datanode-cluster" + - "network.bind_host=_site_" + - "discovery.seed_hosts=opensearch1,opensearch2" + - "cluster.initial_master_nodes=opensearch1,opensearch2,opensearch3" + - "bootstrap.memory_lock=true" + - "action.auto_create_index=false" + - "DISABLE_INSTALL_DEMO_CONFIG=true" + - "plugins.security.allow_unsafe_democertificates=true" + # - "plugins.security.ssl.transport.enforce_hostname_verification=false" + # - "plugins.security.ssl.transport.resolve_hostname=false" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + volumes: + - "opensearch-data-03:/usr/share/opensearch/data" + - "./root-ca.pem:/usr/share/opensearch/config/root-ca.pem" + - "./node3.pem:/usr/share/opensearch/config/node.pem" + - "./node3-key.pem:/usr/share/opensearch/config/node-key.pem" + - "./admin.pem:/usr/share/opensearch/config/admin.pem" + - "./admin-key.pem:/usr/share/opensearch/config/admin-key.pem" + - "./custom-opensearch.yml:/usr/share/opensearch/config/opensearch.yml" +# - "./opensearch-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + - "./datanode-security-config.yml:/usr/share/opensearch/config/opensearch-security/config.yml" + - "./keystore.jks:/usr/share/opensearch/config/keystore.jks" + + datanode1: + image: "${DATANODE_IMAGE:-graylog/graylog-datanode:5.2.0}" + hostname: opensearch1 + depends_on: + - "mongodb" + environment: + GRAYLOG_DATANODE_NODE_ID_FILE: "/var/lib/graylog-datanode/node-id" + GRAYLOG_DATANODE_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env 
file}" + GRAYLOG_DATANODE_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_DATANODE_MONGODB_URI: "mongodb://mongodb:27017/graylog" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + ports: + - "8999:8999/tcp" # DataNode API + - "9200:9200" + volumes: + - "graylog-datanode-01:/var/lib/graylog-datanode" + - "opensearch-data-01:/var/lib/graylog-datanode/opensearch/data" + + datanode2: + image: "${DATANODE_IMAGE:-graylog/graylog-datanode:5.2.0}" + hostname: opensearch2 + depends_on: + - "mongodb" + environment: + GRAYLOG_DATANODE_NODE_ID_FILE: "/var/lib/graylog-datanode/node-id" + GRAYLOG_DATANODE_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_DATANODE_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_DATANODE_MONGODB_URI: "mongodb://mongodb:27017/graylog" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + ports: + - "8998:8999/tcp" # DataNode API + volumes: + - "graylog-datanode-02:/var/lib/graylog-datanode" + - "opensearch-data-02:/var/lib/graylog-datanode/opensearch/data" + + datanode3: + image: "${DATANODE_IMAGE:-graylog/graylog-datanode:5.2.0}" + hostname: opensearch3 + depends_on: + - "mongodb" + environment: + GRAYLOG_DATANODE_NODE_ID_FILE: "/var/lib/graylog-datanode/node-id" + GRAYLOG_DATANODE_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_DATANODE_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_DATANODE_MONGODB_URI: "mongodb://mongodb:27017/graylog" + ulimits: + memlock: + hard: -1 + soft: -1 + nofile: + soft: 65536 + hard: 65536 + ports: + - "8997:8999/tcp" # DataNode API + volumes: + - "graylog-datanode-03:/var/lib/graylog-datanode" + - "opensearch-data-03:/var/lib/graylog-datanode/opensearch/data" + + graylog1: + image: "${GRAYLOG_IMAGE:-graylog/graylog:5.2.0}" + hostname: "graylog1" + depends_on: + - "mongodb" + entrypoint: "/docker-entrypoint.sh" + environment: + GRAYLOG_NODE_ID_FILE: "/usr/share/graylog/data/data/node-id" + GRAYLOG_PASSWORD_SECRET: "${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}" + GRAYLOG_ROOT_PASSWORD_SHA2: "${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}" + GRAYLOG_HTTP_EXTERNAL_URI: "http://127.0.0.1:9000/" + GRAYLOG_MONGODB_URI: "mongodb://mongodb:27017/graylog" + GRAYLOG_REPORT_DISABLE_SANDBOX: "true" +# GRAYLOG_CA_KEYSTORE_FILE: "/usr/share/graylog/data/keystore.jks" +# GRAYLOG_CA_PASSWORD: "password" +# GRAYLOG_ELASTICSEARCH_HOSTS: "https://admin:admin@opensearch1:9200,https://admin:admin@opensearch2:9200,https://admin:admin@opensearch3:9200" + GRAYLOG_ENABLE_PREFLIGHT_WEB: "true" + ports: + - "9000:9000" # Graylog web interface and REST API + - "1514:1514" # Syslog TCP + - "1514:1514/udp" # Syslog UDP + - "12201:12201" # GELF TCP + - "12201:12201/udp" # GELF UDP + volumes: + - "graylog-data-01:/usr/share/graylog/data/data" + - "graylog-journal-01:/usr/share/graylog/data/journal" + - "./keystore.jks:/usr/share/graylog/data/keystore.jks" + +volumes: + graylog-datanode-01: + graylog-datanode-02: + graylog-datanode-03: + graylog-data-01: + graylog-journal-01: + mongodb-data: + opensearch-data-01: + opensearch-data-02: + opensearch-data-03: diff --git 
a/data-node/migration/opensearch-security-config.yml b/data-node/migration/opensearch-security-config.yml new file mode 100644 index 000000000000..1493a0d7f17a --- /dev/null +++ b/data-node/migration/opensearch-security-config.yml @@ -0,0 +1,250 @@ +--- + +# This is the main OpenSearch Security configuration file where authentication +# and authorization is defined. +# +# You need to configure at least one authentication domain in the authc of this file. +# An authentication domain is responsible for extracting the user credentials from +# the request and for validating them against an authentication backend like Active Directory for example. +# +# If more than one authentication domain is configured the first one which succeeds wins. +# If all authentication domains fail then the request is unauthenticated. +# In this case an exception is thrown and/or the HTTP status is set to 401. +# +# After authentication authorization (authz) will be applied. There can be zero or more authorizers which collect +# the roles from a given backend for the authenticated user. +# +# Both, authc and auth can be enabled/disabled separately for REST and TRANSPORT layer. Default is true for both. +# http_enabled: true +# transport_enabled: true +# +# For HTTP it is possible to allow anonymous authentication. If that is the case then the HTTP authenticators try to +# find user credentials in the HTTP request. If credentials are found then the user gets regularly authenticated. +# If none can be found the user will be authenticated as an "anonymous" user. This user has always the username "anonymous" +# and one role named "anonymous_backendrole". +# If you enable anonymous authentication all HTTP authenticators will not challenge. +# +# +# Note: If you define more than one HTTP authenticators make sure to put non-challenging authenticators like "proxy" or "clientcert" +# first and the challenging one last. +# Because it's not possible to challenge a client with two different authentication methods (for example +# Kerberos and Basic) only one can have the challenge flag set to true. You can cope with this situation +# by using pre-authentication, e.g. sending a HTTP Basic authentication header in the request. +# +# Default value of the challenge flag is true. +# +# +# HTTP +# basic (challenging) +# proxy (not challenging, needs xff) +# kerberos (challenging) +# clientcert (not challenging, needs https) +# jwt (not challenging) +# host (not challenging) #DEPRECATED, will be removed in a future version. 
+# host based authentication is configurable in roles_mapping + +# Authc +# internal +# noop +# ldap + +# Authz +# ldap +# noop + + + +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + # Set filtered_alias_mode to 'disallow' to forbid more than 2 filtered aliases per index + # Set filtered_alias_mode to 'warn' to allow more than 2 filtered aliases per index but warns about it (default) + # Set filtered_alias_mode to 'nowarn' to allow more than 2 filtered aliases per index silently + #filtered_alias_mode: warn + #do_not_fail_on_forbidden: false + #kibana: + # Kibana multitenancy + #multitenancy_enabled: true + #private_tenant_enabled: true + #default_tenant: "" + #server_username: kibanaserver + #index: '.kibana' + http: + anonymous_auth_enabled: false + xff: + enabled: false + internalProxies: '192\.168\.0\.10|192\.168\.0\.11' # regex pattern + #internalProxies: '.*' # trust all internal proxies, regex pattern + #remoteIpHeader: 'x-forwarded-for' + ###### see https://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html for regex help + ###### more information about XFF https://en.wikipedia.org/wiki/X-Forwarded-For + ###### and here https://tools.ietf.org/html/rfc7239 + ###### and https://tomcat.apache.org/tomcat-8.0-doc/config/valve.html#Remote_IP_Valve + authc: + kerberos_auth_domain: + http_enabled: false + transport_enabled: false + order: 6 + http_authenticator: + type: kerberos + challenge: true + config: + # If true a lot of kerberos/security related debugging output will be logged to standard out + krb_debug: false + # If true then the realm will be stripped from the user name + strip_realm_from_principal: true + authentication_backend: + type: noop + basic_internal_auth_domain: + description: "Authenticate via HTTP Basic against internal users database" + http_enabled: true + transport_enabled: true + order: 4 + http_authenticator: + type: basic + challenge: true + authentication_backend: + type: intern + proxy_auth_domain: + description: "Authenticate via proxy" + http_enabled: false + transport_enabled: false + order: 3 + http_authenticator: + type: proxy + challenge: false + config: + user_header: "x-proxy-user" + roles_header: "x-proxy-roles" + authentication_backend: + type: noop + jwt_auth_domain: + description: "Authenticate via Json Web Token" + http_enabled: false + transport_enabled: false + order: 0 + http_authenticator: + type: jwt + challenge: false + config: + signing_key: "base64 encoded HMAC key or public RSA/ECDSA pem key" + jwt_header: "Authorization" + jwt_url_parameter: null + jwt_clock_skew_tolerance_seconds: 30 + roles_key: null + subject_key: null + authentication_backend: + type: noop + clientcert_auth_domain: + description: "Authenticate via SSL client certificates" + http_enabled: false + transport_enabled: false + order: 2 + http_authenticator: + type: clientcert + config: + username_attribute: cn #optional, if omitted DN becomes username + challenge: false + authentication_backend: + type: noop + ldap: + description: "Authenticate via LDAP or Active Directory" + http_enabled: false + transport_enabled: false + order: 5 + http_authenticator: + type: basic + challenge: false + authentication_backend: + # LDAP authentication backend (authenticate users against a LDAP or Active Directory) + type: ldap + config: + # enable ldaps + enable_ssl: false + # enable start tls, enable_ssl should be false + enable_start_tls: false + # send client certificate + enable_ssl_client_auth: false + # verify ldap hostname + verify_hostnames: true + 
hosts: + - localhost:8389 + bind_dn: null + password: null + userbase: 'ou=people,dc=example,dc=com' + # Filter to search for users (currently in the whole subtree beneath userbase) + # {0} is substituted with the username + usersearch: '(sAMAccountName={0})' + # Use this attribute from the user as username (if not set then DN is used) + username_attribute: null + authz: + roles_from_myldap: + description: "Authorize via LDAP or Active Directory" + http_enabled: false + transport_enabled: false + authorization_backend: + # LDAP authorization backend (gather roles from a LDAP or Active Directory, you have to configure the above LDAP authentication backend settings too) + type: ldap + config: + # enable ldaps + enable_ssl: false + # enable start tls, enable_ssl should be false + enable_start_tls: false + # send client certificate + enable_ssl_client_auth: false + # verify ldap hostname + verify_hostnames: true + hosts: + - localhost:8389 + bind_dn: null + password: null + rolebase: 'ou=groups,dc=example,dc=com' + # Filter to search for roles (currently in the whole subtree beneath rolebase) + # {0} is substituted with the DN of the user + # {1} is substituted with the username + # {2} is substituted with an attribute value from user's directory entry, of the authenticated user. Use userroleattribute to specify the name of the attribute + rolesearch: '(member={0})' + # Specify the name of the attribute which value should be substituted with {2} above + userroleattribute: null + # Roles as an attribute of the user entry + userrolename: disabled + #userrolename: memberOf + # The attribute in a role entry containing the name of that role, Default is "name". + # Can also be "dn" to use the full DN as rolename. + rolename: cn + # Resolve nested roles transitive (roles which are members of other roles and so on ...) + resolve_nested_roles: true + userbase: 'ou=people,dc=example,dc=com' + # Filter to search for users (currently in the whole subtree beneath userbase) + # {0} is substituted with the username + usersearch: '(uid={0})' + # Skip users matching a user name, a wildcard or a regex pattern + #skip_users: + # - 'cn=Michael Jackson,ou*people,o=TEST' + # - '/\S*/' + roles_from_another_ldap: + description: "Authorize via another Active Directory" + http_enabled: false + transport_enabled: false + authorization_backend: + type: ldap + #config goes here ... 
+ # auth_failure_listeners: + # ip_rate_limiting: + # type: ip + # allowed_tries: 10 + # time_window_seconds: 3600 + # block_expiry_seconds: 600 + # max_blocked_clients: 100000 + # max_tracked_clients: 100000 + # internal_authentication_backend_limiting: + # type: username + # authentication_backend: intern + # allowed_tries: 10 + # time_window_seconds: 3600 + # block_expiry_seconds: 600 + # max_blocked_clients: 100000 + # max_tracked_clients: 100000 diff --git a/data-node/pom.xml b/data-node/pom.xml new file mode 100644 index 000000000000..c66d5bf48b8a --- /dev/null +++ b/data-node/pom.xml @@ -0,0 +1,902 @@ + + + + + org.graylog + graylog-project-parent + 6.2.0-SNAPSHOT + ../graylog-project-parent + + + 4.0.0 + + org.graylog2 + data-node + jar + + DataNode + Graylog management tool for Opensearch + + 2023 + + + org.graylog.datanode.bootstrap.Main + + 17 + 17 + UTF-8 + 2.15.0 + + + true + true + true + true + yyyyMMddHHmmss + + + + + org.graylog2 + graylog2-server + ${project.version} + + + org.graylog + graylog-storage-opensearch2 + ${project.version} + + + + com.github.rvesse + airline + + + com.google.auto.value + auto-value-annotations + provided + + + org.graylog.autovalue + auto-value-javabean + provided + + + com.google.guava + guava + + + com.google.inject + guice + + + com.google.inject.extensions + guice-assistedinject + + + jakarta.inject + jakarta.inject-api + + + org.graylog + jadconfig + + + com.github.zafarkhaja + java-semver + + + org.graylog.repackaged + semver4j + + + org.mongodb + mongodb-driver-sync + + + org.mongodb + mongodb-driver-legacy + + + org.mongojack + mongojack + + + org.graylog.repackaged + os-platform-finder + + + io.dropwizard.metrics + metrics-annotation + + + io.dropwizard.metrics + metrics-core + + + io.dropwizard.metrics + metrics-log4j2 + + + io.dropwizard.metrics + metrics-jvm + + + io.dropwizard.metrics + metrics-jmx + + + io.dropwizard.metrics + metrics-json + + + org.bouncycastle + bcpkix-jdk18on + + + org.bouncycastle + bcprov-jdk18on + + + io.netty + netty-common + + + io.netty + netty-buffer + + + io.netty + netty-handler + + + io.netty + netty-codec + + + io.netty + netty-codec-dns + + + io.netty + netty-codec-http + + + io.netty + netty-resolver-dns + + + io.netty + netty-transport-native-epoll + linux-x86_64 + + + io.netty + netty-transport-native-epoll + linux-aarch_64 + + + io.netty + netty-transport-native-kqueue + osx-x86_64 + + + io.netty + netty-transport-native-kqueue + osx-aarch_64 + + + io.netty + netty-tcnative-boringssl-static + osx-x86_64 + + + io.netty + netty-tcnative-boringssl-static + osx-aarch_64 + + + io.netty + netty-tcnative-boringssl-static + linux-x86_64 + + + io.netty + netty-tcnative-boringssl-static + linux-aarch_64 + + + io.jsonwebtoken + jjwt-api + + + io.jsonwebtoken + jjwt-impl + + + io.jsonwebtoken + jjwt-jackson + runtime + + + org.apache.lucene + lucene-backward-codecs + ${lucene.version} + + + + org.glassfish.jersey.inject + jersey-hk2 + + + org.glassfish.jersey.ext + jersey-bean-validation + + + org.glassfish.jersey.media + jersey-media-multipart + + + org.glassfish.jersey.containers + jersey-container-grizzly2-http + + + org.glassfish.hk2 + guice-bridge + + + org.glassfish.hk2 + hk2-api + + + org.glassfish.hk2 + hk2-locator + + + jakarta.ws.rs + jakarta.ws.rs-api + + + javax.xml.bind + jaxb-api + + + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-slf4j2-impl + + + org.slf4j + jcl-over-slf4j + + + org.slf4j + log4j-over-slf4j + + + 
org.apache.logging.log4j + log4j-jul + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.datatype + jackson-datatype-guava + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + + com.fasterxml.jackson.datatype + jackson-datatype-joda + + + com.fasterxml.jackson.module + jackson-module-jsonSchema-jakarta + + + com.fasterxml.jackson.jakarta.rs + jackson-jakarta-rs-json-provider + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + + + com.fasterxml.jackson.dataformat + jackson-dataformat-csv + + + + org.reflections + reflections + + + + com.github.rholder + guava-retrying + + + + commons-io + commons-io + ${commons-io.version} + + + + joda-time + joda-time + ${joda-time.version} + + + + org.cryptomator + siv-mode + + + + org.apache.shiro + shiro-core + + + + org.graylog.shaded + opensearch2-rest-high-level-client + ${opensearch.shaded.version} + + + commons-logging + commons-logging + + + + + org.graylog.shaded + opensearch2-rest-client-sniffer + ${opensearch.shaded.version} + + + commons-logging + commons-logging + + + + + + + org.apache.commons + commons-collections4 + 4.4 + + + + org.apache.commons + commons-exec + + + + com.github.stateless4j + stateless4j + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-smile + + + + org.junit.jupiter + junit-jupiter-engine + test + + + + org.assertj + assertj-core + test + + + + org.testcontainers + testcontainers + test + + + + org.mockito + mockito-core + test + + + + io.rest-assured + rest-assured + test + + + org.mockito + mockito-junit-jupiter + test + + + + org.graylog2 + graylog2-server + ${project.version} + test-jar + test + + + + + + + datanode + + + + src/main/resources + + **/*.properties + + + + src/main/resources + true + + **/*.properties + + + + + + + src/test/resources + true + + indices/** + + + + src/test/resources/indices + ${project.build.testOutputDirectory}/indices + false + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.6.0 + + + org.codehaus.mojo + buildnumber-maven-plugin + 3.2.1 + + + org.codehaus.mojo + exec-maven-plugin + 3.5.0 + + + + + + + org.graylog.repackaged + download-maven-plugin + ${download-maven-plugin.version} + + + unpack-opensearch-tar-linux-x64 + generate-resources + + wget + + + + https://artifacts.opensearch.org/releases/bundle/opensearch/${opensearch.version}/opensearch-${opensearch.version}-linux-x64.tar.gz + + + a72f218b581903b1ddf1072498560bc5304516ed57feb39735f920985740366c2779ed9174251a13b9a9dabdfc184119c325bb387dadf96485dc7964ecf32d54 + + opensearch-${opensearch.version}-linux-x64.tar.gz + ${project.build.directory}/opensearch + true + + + ^(opensearch-\d+\.\d+\.\d+)/ + $1-linux-x64/ + + + + + + unpack-opensearch-tar-linux-aarch64 + generate-resources + + wget + + + + https://artifacts.opensearch.org/releases/bundle/opensearch/${opensearch.version}/opensearch-${opensearch.version}-linux-arm64.tar.gz + + + 866c348ac39b7b35e671917349269383c75e1f9d10768742067807569180d1914fd09c6ae7e71d5e74bd77c8c8165ea17c19290b71f887f074dfe8e68dff66bb + + opensearch-${opensearch.version}-linux-aarch64.tar.gz + ${project.build.directory}/opensearch + true + + + ^(opensearch-\d+\.\d+\.\d+)/ + $1-linux-aarch64/ + + + + + + download-repository-s3-plugin + generate-resources + + wget + + + + 
https://artifacts.opensearch.org/releases/plugins/repository-s3/${opensearch.version}/repository-s3-${opensearch.version}.zip + + repository-s3-${opensearch.version}.zip + ${project.build.directory}/opensearch-plugins + + de44a181133a33a08703e8464b52aa2ac29d1d9918d9478c307b98909a571baaaf0c9dcb664349802c3dd85fa53275bd2228248fd129179631b9096d470eb8d5 + + + + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + fix-opensearch-config-permissions + prepare-package + + run + + + + + + + + + + remove-unused-opensearch-plugins + prepare-package + + run + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + exec-maven-plugin + org.codehaus.mojo + + + install-required-opensearch-plugins-x64 + prepare-package + + exec + + + bash + + ${java.home} + ${project.build.directory}/opensearch/opensearch-${opensearch.version}-linux-x64/bin/opensearch-plugin + file://${project.build.directory}/opensearch-plugins/repository-s3-${opensearch.version}.zip + + + -c + $BIN list repository-s3 | grep -q repository-s3 || $BIN install --batch $FILE + + + + + install-required-opensearch-plugins-aarch64 + prepare-package + + exec + + + bash + + ${java.home} + ${project.build.directory}/opensearch/opensearch-${opensearch.version}-linux-aarch64/bin/opensearch-plugin + file://${project.build.directory}/opensearch-plugins/repository-s3-${opensearch.version}.zip + + + -c + $BIN list repository-s3 | grep -q repository-s3 || $BIN install --batch $FILE + + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${project.build.directory}/lib + false + false + true + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + graylog-datanode-${project.version} + + + true + lib/ + ${mainClass} + + + + + + + org.cyclonedx + cyclonedx-maven-plugin + + + prepare-package + + makeAggregateBom + + + + + application + true + true + true + true + true + false + false + true + all + graylog-data-node-${project.version}-sbom + ${project.build.directory}/sbom + false + true + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + generate-data-node-artifact + package + + single + + + + + true + true + + src/main/assembly/datanode.xml + + + ${project.basedir}/../target/assembly + datanode-${project.version}-${maven.build.timestamp} + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + parse-version + generate-resources + + parse-version + + + maven + + + + + + org.codehaus.mojo + buildnumber-maven-plugin + + + validate + + create + + + + + + com.mycila + license-maven-plugin + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + + + + Data Node packages must be allowed for the data-node/ module. + + org.graylog.datanode.** + + + + + + + + + + + + + release + + datanode-${project.version} + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + true + + + + attach-javadocs + + jar + + + + + + + + + diff --git a/data-node/src/main/assembly/datanode.xml b/data-node/src/main/assembly/datanode.xml new file mode 100644 index 000000000000..8c2dc9fca0e7 --- /dev/null +++ b/data-node/src/main/assembly/datanode.xml @@ -0,0 +1,83 @@ + + + datanode-tarball + + tar.gz + + false + + + ${project.basedir} + . 
+ + README.md + LICENSE + UPGRADING.md + + + + ${project.build.directory}/opensearch + dist + + */** + + + + ${project.build.directory}/lib + lib + + */** + + + + ${project.build.directory}/sbom + + graylog-data-node-*-sbom.json + graylog-data-node-*-sbom.xml + + sbom + + + + + ./ + log + + */** + + + + + + ${project.basedir}/bin/graylog-datanode.sh + graylog-datanode + bin + 0755 + + + ${project.basedir}/config/jvm.options + jvm.options + config + 0644 + + + ${project.basedir}/config/log4j2.xml + log4j2.xml + config + 0644 + + + ${project.build.directory}/graylog-datanode-${project.version}.jar + graylog-datanode.jar + . + + + ${project.basedir}/../misc/datanode.conf + datanode.conf.example + . + + + diff --git a/data-node/src/main/java/org/graylog/datanode/Configuration.java b/data-node/src/main/java/org/graylog/datanode/Configuration.java new file mode 100644 index 000000000000..5e195ae6d459 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/Configuration.java @@ -0,0 +1,700 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode; + +import com.github.joschi.jadconfig.Parameter; +import com.github.joschi.jadconfig.ParameterException; +import com.github.joschi.jadconfig.ValidationException; +import com.github.joschi.jadconfig.Validator; +import com.github.joschi.jadconfig.ValidatorMethod; +import com.github.joschi.jadconfig.converters.IntegerConverter; +import com.github.joschi.jadconfig.converters.StringListConverter; +import com.github.joschi.jadconfig.converters.StringSetConverter; +import com.github.joschi.jadconfig.util.Duration; +import com.github.joschi.jadconfig.validators.PositiveDurationValidator; +import com.github.joschi.jadconfig.validators.PositiveIntegerValidator; +import com.github.joschi.jadconfig.validators.StringNotBlankValidator; +import com.github.joschi.jadconfig.validators.URIAbsoluteValidator; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.net.InetAddresses; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog2.CommonNodeConfiguration; +import org.graylog2.Configuration.SafeClassesValidator; +import org.graylog2.configuration.Documentation; +import org.graylog2.plugin.Tools; +import org.graylog2.shared.SuppressForbidden; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.io.File; +import java.net.Inet4Address; +import java.net.InetAddress; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.UnknownHostException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +/** + * Helper class to hold configuration of DataNode + */ +@SuppressWarnings("FieldMayBeFinal") +public class Configuration implements CommonNodeConfiguration { + private static final Logger LOG = LoggerFactory.getLogger(Configuration.class); + public static final 
String TRANSPORT_CERTIFICATE_PASSWORD_PROPERTY = "transport_certificate_password"; + public static final String HTTP_CERTIFICATE_PASSWORD_PROPERTY = "http_certificate_password"; + + public static final int DATANODE_DEFAULT_PORT = 8999; + public static final String DEFAULT_BIND_ADDRESS = "0.0.0.0"; + + @Documentation(visible = false) + @Parameter(value = "installation_source", validators = StringNotBlankValidator.class) + private String installationSource = "unknown"; + + @Deprecated + @Documentation(visible = false) + @Parameter(value = "insecure_startup") + private boolean insecureStartup = false; + + @Documentation("Do not perform any preflight checks when starting Datanode.") + @Parameter(value = "skip_preflight_checks") + private boolean skipPreflightChecks = false; + + @Documentation("How many milliseconds should datanode wait for termination of all tasks during the shutdown.") + @Parameter(value = "shutdown_timeout", validators = PositiveIntegerValidator.class) + protected int shutdownTimeout = 30000; + + @Documentation("Directory where Datanode will search for an opensearch distribution.") + @Parameter(value = "opensearch_location") + private String opensearchDistributionRoot = "dist"; + + @Documentation("Data directory of the embedded opensearch. Contains indices of the opensearch. May be pointed to an existing" + + "opensearch directory during in-place migration to Datanode") + @Parameter(value = "opensearch_data_location", required = true) + private Path opensearchDataLocation = Path.of("datanode/data"); + + @Documentation("Logs directory of the embedded opensearch") + @Parameter(value = "opensearch_logs_location", required = true, validators = DirectoryWritableValidator.class) + private Path opensearchLogsLocation = Path.of("datanode/logs"); + + @Documentation("Configuration directory of the embedded opensearch. This is the directory where the opensearch" + + "process will store its configuration files. Caution, each start of the Datanode will regenerate the complete content of the directory!") + @Parameter(value = "opensearch_config_location", required = true, validators = DirectoryWritableValidator.class) + private Path opensearchConfigLocation = Path.of("datanode/config"); + + @Documentation("Source directory of the additional configuration files for the Datanode. Additional certificates can be provided here.") + @Parameter(value = "config_location", validators = DirectoryReadableValidator.class) + private Path configLocation = null; + + @Documentation(visible = false) + @Parameter(value = "native_lib_dir", required = true) + private Path nativeLibDir = Path.of("native_libs"); + + @Documentation("How many log entries of the opensearch process should Datanode hold in memory and make accessible via API calls.") + @Parameter(value = "process_logs_buffer_size") + private Integer opensearchProcessLogsBufferSize = 500; + + + @Documentation("Unique name of this Datanode instance. use this, if your node name should be different from the hostname that's found by programmatically looking it up") + @Parameter(value = "node_name") + private String datanodeNodeName; + + + @Documentation("Comma separated list of opensearch nodes that are eligible as manager nodes.") + @Parameter(value = "initial_cluster_manager_nodes") + private String initialClusterManagerNodes; + + @Documentation("Opensearch heap memory. Initial and maxmium heap must be identical for OpenSearch, otherwise the boot fails. 
So it's only one config option") + @Parameter(value = "opensearch_heap") + private String opensearchHeap = "1g"; + + @Documentation("HTTP port on which the embedded opensearch listens") + @Parameter(value = "opensearch_http_port", converter = IntegerConverter.class) + private int opensearchHttpPort = 9200; + + @Documentation("Transport port on which the embedded opensearch listens") + @Parameter(value = "opensearch_transport_port", converter = IntegerConverter.class) + private int opensearchTransportPort = 9300; + + @Documentation("Provides a list of the addresses of the master-eligible nodes in the cluster.") + @Parameter(value = "opensearch_discovery_seed_hosts", converter = StringListConverter.class) + private List opensearchDiscoverySeedHosts = Collections.emptyList(); + + @Documentation("Binds an OpenSearch node to an address. Use 0.0.0.0 to include all available network interfaces, or specify an IP address assigned to a specific interface. ") + @Parameter(value = "opensearch_network_host") + private String opensearchNetworkHost = null; + + @Documentation("Relative path (to config_location) to a keystore used for opensearch transport layer TLS") + @Parameter(value = "transport_certificate") + private String datanodeTransportCertificate = null; + + @Documentation("Password for a keystore defined in transport_certificate") + @Parameter(value = TRANSPORT_CERTIFICATE_PASSWORD_PROPERTY) + private String datanodeTransportCertificatePassword; + + @Documentation("Transport keystore alias name. Optional. Default is the first alias.") + @Parameter(value = "transport_certificate_alias") + private String datanodeTransportCertificateAlias; + + + @Documentation("Relative path (to config_location) to a keystore used for opensearch REST layer TLS") + @Parameter(value = "http_certificate") + private String datanodeHttpCertificate = null; + + @Documentation("Password for a keystore defined in http_certificate") + @Parameter(value = HTTP_CERTIFICATE_PASSWORD_PROPERTY) + private String datanodeHttpCertificatePassword; + + @Documentation("Http keystore alias name. Optional. Default is the first alias.") + @Parameter(value = "http_certificate_alias") + private String datanodeHttpCertificateAlias; + + + @Documentation("You MUST set a secret to secure/pepper the stored user passwords here. Use at least 16 characters." + + "Generate one by using for example: pwgen -N 1 -s 96 \n" + + "ATTENTION: This value must be the same on all Graylog and Datanode nodes in the cluster. " + + "Changing this value after installation will render all user sessions and encrypted values in the database invalid. (e.g. encrypted access tokens)") + @Parameter(value = "password_secret", required = true, validators = StringNotBlankValidator.class) + private String passwordSecret; + + @Documentation("communication between Graylog and OpenSearch is secured by JWT. This configuration defines interval between token regenerations.") + @Parameter(value = "indexer_jwt_auth_token_caching_duration") + Duration indexerJwtAuthTokenCachingDuration = Duration.seconds(60); + + @Documentation("communication between Graylog and OpenSearch is secured by JWT. This configuration defines validity interval of JWT tokens.") + @Parameter(value = "indexer_jwt_auth_token_expiration_duration") + Duration indexerJwtAuthTokenExpirationDuration = Duration.seconds(180); + + @Documentation("The auto-generated node ID will be stored in this file and read after restarts. 
It is a good idea " + + "to use an absolute file path here if you are starting Graylog DataNode from init scripts or similar.") + @Parameter(value = "node_id_file", validators = NodeIdFileValidator.class) + private String nodeIdFile = "data/node-id"; + + @Documentation("HTTP bind address. The network interface used by the Graylog DataNode to bind all services.") + @Parameter(value = "bind_address", required = true) + private String bindAddress = DEFAULT_BIND_ADDRESS; + + + @Documentation("HTTP port. The port where the DataNode REST api is listening") + @Parameter(value = "datanode_http_port", required = true) + private int datanodeHttpPort = DATANODE_DEFAULT_PORT; + + @Documentation(visible = false) + @Parameter(value = "hostname") + private String hostname = null; + + @Documentation("Name of the cluster that the embedded opensearch will form. Should be the same for all Datanodes in one cluster.") + @Parameter(value = "clustername") + private String clustername = "datanode-cluster"; + + @Documentation("This configuration should be used if you want to connect to this Graylog DataNode's REST API and it is available on " + + "another network interface than $http_bind_address, " + + "for example if the machine has multiple network interfaces or is behind a NAT gateway.") + @Parameter(value = "http_publish_uri", validators = URIAbsoluteValidator.class) + private URI httpPublishUri; + + + @Documentation("Enable GZIP support for HTTP interface. This compresses API responses and therefore helps to reduce " + + " overall round trip times.") + @Parameter(value = "http_enable_gzip") + private boolean httpEnableGzip = true; + + @Documentation("The maximum size of the HTTP request headers in bytes") + @Parameter(value = "http_max_header_size", required = true, validator = PositiveIntegerValidator.class) + private int httpMaxHeaderSize = 8192; + + @Documentation("The size of the thread pool used exclusively for serving the HTTP interface.") + @Parameter(value = "http_thread_pool_size", required = true, validator = PositiveIntegerValidator.class) + private int httpThreadPoolSize = 64; + + @Documentation(visible = false, value = "The Grizzly default value is equal to `Runtime.getRuntime().availableProcessors()` which doesn't make " + + "sense for Graylog because we are not mainly a web server. " + + "See \"Selector runners count\" at https://grizzly.java.net/bestpractices.html for details.") + @Parameter(value = "http_selector_runners_count", required = true, validator = PositiveIntegerValidator.class) + private int httpSelectorRunnersCount = 1; + + @Documentation(visible = false, value = "TODO: do we need this configuration? We control the decision based on preflight and CA configurations") + @Parameter(value = "http_enable_tls") + private boolean httpEnableTls = false; + + + @Documentation(visible = false, value = "Classes considered safe to load by name. 
A set of prefixes matched against the fully qualified class name.") + @Parameter(value = org.graylog2.Configuration.SAFE_CLASSES, converter = StringSetConverter.class, validators = SafeClassesValidator.class) + private Set safeClasses = Set.of("org.graylog.", "org.graylog2."); + + @Documentation(visible = false) + @Parameter(value = "metrics_timestamp") + private String metricsTimestamp = "timestamp"; + + @Documentation(visible = false) + @Parameter(value = "metrics_stream") + private String metricsStream = "gl-datanode-metrics"; + + @Documentation(visible = false) + @Parameter(value = "metrics_retention", validators = PositiveDurationValidator.class) + private Duration metricsRetention = Duration.days(14); + + @Documentation(visible = false) + @Parameter(value = "metrics_daily_retention", validators = PositiveDurationValidator.class) + private Duration metricsDailyRetention = Duration.days(365); + + @Documentation(visible = false) + @Parameter(value = "metrics_daily_index") + private String metricsDailyIndex = "gl-datanode-metrics-daily"; + + @Documentation(visible = false) + @Parameter(value = "metrics_policy") + private String metricsPolicy = "gl-datanode-metrics-ism"; + + /** + * @see Searchable snapshots + */ + @Documentation(value = """ + Cache size for searchable snapshots. This space will be automatically reserved + if you have either S3 or shared filesystem repositories enabled and configured. + See s3_client_* configuration options and path_repo. + """) + @Parameter(value = "node_search_cache_size") + private String searchCacheSize = "10gb"; + + /** + * See snapshot documentation + */ + @Documentation("Filesystem path where searchable snapshots should be stored") + @Parameter(value = "path_repo", converter = StringListConverter.class) + private List pathRepo; + + @Documentation("This setting limits the number of clauses a Lucene BooleanQuery can have.") + @Parameter(value = "opensearch_indices_query_bool_max_clause_count") + private Integer indicesQueryBoolMaxClauseCount = 32768; + + @Documentation(""" + List of the opensearch node’s roles. If nothing defined, datanode will use cluster_manager,data,ingest,remote_cluster_client. + If roles are not defined but configuration contains snapshots configuration (path_repo or s3 credentials), the search + role will be automatically added. + """) + @Parameter(value = "node_roles", converter = StringListConverter.class) + private List nodeRoles; + + @Documentation(visible = false) + @Parameter(value = "async_eventbus_processors") + private int asyncEventbusProcessors = 2; + + public int getAsyncEventbusProcessors() { + return asyncEventbusProcessors; + } + + + public Integer getIndicesQueryBoolMaxClauseCount() { + return indicesQueryBoolMaxClauseCount; + } + + @Documentation("Configures verbosity of embedded opensearch logs. Possible values OFF, FATAL, ERROR, WARN, INFO, DEBUG, and TRACE, default is INFO") + @Parameter(value = "opensearch_logger_org_opensearch") + private String opensearchDebug; + + public String getOpensearchDebug() { + return opensearchDebug; + } + + @Documentation("Configures opensearch audit log storage type. See https://opensearch.org/docs/2.13/security/audit-logs/storage-types/") + @Parameter(value = "opensearch_plugins_security_audit_type") + private String opensearchAuditLog; + + public String getOpensearchAuditLog() { + return opensearchAuditLog; + } + + /** + * The insecure flag causes problems on many places. 
We should replace it with autosecurity option, that would + * configure all the CA and certs automatically. + */ + @Deprecated + public boolean isInsecureStartup() { + return insecureStartup; + } + + public String getInstallationSource() { + return installationSource; + } + + public boolean getSkipPreflightChecks() { + return skipPreflightChecks; + } + + public int getShutdownTimeout() { + return shutdownTimeout; + } + + public String getOpensearchDistributionRoot() { + return opensearchDistributionRoot; + } + + /** + * Use {@link DatanodeDirectories} to obtain a reference to this directory. + */ + public Path getOpensearchConfigLocation() { + return opensearchConfigLocation; + } + + + /** + * This is a pointer to a directory holding configuration files (and certificates) for the datanode itself. + * We treat it as read only for the datanode and should never persist anything in it. + * Use {@link DatanodeDirectories} to obtain a reference to this directory. + */ + @Nullable + public Path getDatanodeConfigurationLocation() { + return configLocation; + } + + /** + * Use {@link DatanodeDirectories} to obtain a reference to this directory. + */ + public Path getOpensearchDataLocation() { + return opensearchDataLocation; + } + + /** + * Use {@link DatanodeDirectories} to obtain a reference to this directory. + */ + public Path getOpensearchLogsLocation() { + return opensearchLogsLocation; + } + + public Integer getProcessLogsBufferSize() { + return opensearchProcessLogsBufferSize; + } + + public String getPasswordSecret() { + return passwordSecret; + } + + @ValidatorMethod + @SuppressWarnings("unused") + public void validatePasswordSecret() throws ValidationException { + if (passwordSecret == null || passwordSecret.length() < 16) { + throw new ValidationException("The minimum length for \"password_secret\" is 16 characters."); + } + } + + public String getDatanodeNodeName() { + return datanodeNodeName != null && !datanodeNodeName.isBlank() ? 
datanodeNodeName : getHostname(); + } + + public String getInitialClusterManagerNodes() { + return initialClusterManagerNodes; + } + + public int getOpensearchHttpPort() { + return opensearchHttpPort; + } + + public int getOpensearchTransportPort() { + return opensearchTransportPort; + } + + public List getOpensearchDiscoverySeedHosts() { + return opensearchDiscoverySeedHosts; + } + + public String getDatanodeTransportCertificate() { + return datanodeTransportCertificate; + } + + public String getDatanodeTransportCertificatePassword() { + return datanodeTransportCertificatePassword; + } + + public String getDatanodeHttpCertificate() { + return datanodeHttpCertificate; + } + + public String getDatanodeHttpCertificatePassword() { + return datanodeHttpCertificatePassword; + } + + public Optional getOpensearchNetworkHost() { + return Optional.ofNullable(opensearchNetworkHost); + } + + public String getBindAddress() { + return bindAddress; + } + + public int getDatanodeHttpPort() { + return datanodeHttpPort; + } + + public String getClustername() { + return clustername; + } + + + public String getMetricsTimestamp() { + return metricsTimestamp; + } + + public String getMetricsStream() { + return metricsStream; + } + + public Duration getMetricsRetention() { + return metricsRetention; + } + + public String getMetricsDailyIndex() { + return metricsDailyIndex; + } + + public String getMetricsPolicy() { + return metricsPolicy; + } + + public Path getNativeLibDir() { + return nativeLibDir; + } + + public static class NodeIdFileValidator implements Validator { + @Override + public void validate(String name, String path) throws ValidationException { + if (path == null) { + return; + } + final File file = Paths.get(path).toFile(); + final StringBuilder b = new StringBuilder(); + + if (!file.exists()) { + final File parent = file.getParentFile(); + if (!parent.isDirectory()) { + throw new ValidationException("Parent path " + parent + " for Node ID file at " + path + " is not a directory"); + } else { + if (!parent.canRead()) { + throw new ValidationException("Parent directory " + parent + " for Node ID file at " + path + " is not readable"); + } + if (!parent.canWrite()) { + throw new ValidationException("Parent directory " + parent + " for Node ID file at " + path + " is not writable"); + } + + // parent directory exists and is readable and writable + return; + } + } + + if (!file.isFile()) { + b.append("a file"); + } + final boolean readable = file.canRead(); + final boolean writable = file.canWrite(); + if (!readable) { + if (b.length() > 0) { + b.append(", "); + } + b.append("readable"); + } + final boolean empty = file.length() == 0; + if (!writable && readable && empty) { + if (!b.isEmpty()) { + b.append(", "); + } + b.append("writable, but it is empty"); + } + if (b.isEmpty()) { + // all good + return; + } + throw new ValidationException("Node ID file at path " + path + " isn't " + b + ". Please specify the correct path or change the permissions"); + } + } + + public String getUriScheme() { + return isHttpEnableTls() ? "https" : "http"; + } + + @Nullable + private InetAddress toInetAddress(String host) { + try { + return InetAddress.getByName(host); + } catch (UnknownHostException e) { + LOG.debug("Couldn't resolve \"{}\"", host, e); + return null; + } + } + + public URI getHttpPublishUri() { + if (httpPublishUri == null) { + final URI defaultHttpUri = getDefaultHttpUri(); + LOG.debug("No \"http_publish_uri\" set. 
Using default <{}>.", defaultHttpUri); + return defaultHttpUri; + } else { + final InetAddress inetAddress = toInetAddress(httpPublishUri.getHost()); + if (Tools.isWildcardInetAddress(inetAddress)) { + final URI defaultHttpUri = getDefaultHttpUri(httpPublishUri.getPath()); + LOG.warn("\"{}\" is not a valid setting for \"http_publish_uri\". Using default <{}>.", httpPublishUri, defaultHttpUri); + return defaultHttpUri; + } else { + return Tools.normalizeURI(httpPublishUri, httpPublishUri.getScheme(), DATANODE_DEFAULT_PORT, httpPublishUri.getPath()); + } + } + } + + @VisibleForTesting + URI getDefaultHttpUri() { + return getDefaultHttpUri("/"); + } + + private URI getDefaultHttpUri(String path) { + final URI publishUri; + final InetAddress inetAddress = toInetAddress(bindAddress); + if (Tools.isWildcardInetAddress(inetAddress)) { + final InetAddress guessedAddress; + try { + guessedAddress = Tools.guessPrimaryNetworkAddress(inetAddress instanceof Inet4Address); + + if (guessedAddress.isLoopbackAddress()) { + LOG.debug("Using loopback address {}", guessedAddress); + } + } catch (Exception e) { + LOG.error("Could not guess primary network address for \"http_publish_uri\". Please configure it in your Graylog configuration.", e); + throw new ParameterException("No http_publish_uri.", e); + } + + try { + publishUri = new URI( + getUriScheme(), + null, + guessedAddress.getHostAddress(), + datanodeHttpPort, + path, + null, + null + ); + } catch (URISyntaxException e) { + throw new RuntimeException("Invalid http_publish_uri.", e); + } + } else { + try { + publishUri = new URI( + getUriScheme(), + null, + bindAddress, + datanodeHttpPort, + path, + null, + null + ); + } catch (URISyntaxException e) { + throw new RuntimeException("Invalid http_publish_uri.", e); + } + } + + return publishUri; + } + + + public boolean isHttpEnableGzip() { + return httpEnableGzip; + } + + public int getHttpMaxHeaderSize() { + return httpMaxHeaderSize; + } + + public int getHttpThreadPoolSize() { + return httpThreadPoolSize; + } + + public int getHttpSelectorRunnersCount() { + return httpSelectorRunnersCount; + } + + public boolean isHttpEnableTls() { + return httpEnableTls; + } + + @SuppressForbidden("Deliberate invocation of DNS lookup") + public String getHostname() { + if (hostname != null && !hostname.isBlank()) { + // config setting always takes precedence + return hostname; + } + + if (DEFAULT_BIND_ADDRESS.equals(bindAddress)) { + // no hostname is set, bind address is to 0.0.0.0 -> return host name, the OS finds + return Tools.getLocalCanonicalHostname(); + } + + if (InetAddresses.isInetAddress(bindAddress)) { + // bindaddress is a real IP, resolving the hostname + try { + InetAddress addr = InetAddress.getByName(bindAddress); + return addr.getHostName(); + } catch (UnknownHostException e) { + final var hostname = Tools.getLocalCanonicalHostname(); + LOG.error("Could not resolve {} to hostname, check your DNS. 
Using {} instead.", bindAddress, hostname); + return hostname; + } + } + + // bindaddress is configured as the hostname + return bindAddress; + } + + public String getNodeSearchCacheSize() { + return searchCacheSize; + } + + public List getPathRepo() { + return pathRepo; + } + + public List getNodeRoles() { + return nodeRoles; + } + + public String getOpensearchHeap() { + return opensearchHeap; + } + + @Override + public String getEnvironmentVariablePrefix() { + return "GRAYLOG_DATANODE_"; + } + + @Override + public String getSystemPropertyPrefix() { + return "graylog.datanode."; + } + + @Override + public boolean withPlugins() { + return true; + } + + public String getDatanodeTransportCertificateAlias() { + return datanodeTransportCertificateAlias; + } + + public String getDatanodeHttpCertificateAlias() { + return datanodeHttpCertificateAlias; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/DirectoryReadableValidator.java b/data-node/src/main/java/org/graylog/datanode/DirectoryReadableValidator.java new file mode 100644 index 000000000000..28752fa00639 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/DirectoryReadableValidator.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode; + +import com.github.joschi.jadconfig.ValidationException; +import com.github.joschi.jadconfig.validators.DirectoryPathReadableValidator; + +import java.nio.file.Files; +import java.nio.file.Path; + +/** + * The original validator doesn't tell if the problem is nonexistence of the directory, file instead of dir + * or permissions problem. This validator provides more specific exception with detailed message. + */ +public class DirectoryReadableValidator extends DirectoryPathReadableValidator { + @Override + public void validate(String name, Path value) throws ValidationException { + if (value == null) { + return; + } + + if (!Files.exists(value)) { + throw new ValidationException("Cannot read from directory " + name + " at path " + value + ". Directory doesn't exist. Please create the directory."); + } + + if (!Files.isDirectory(value)) { + throw new ValidationException("Cannot read from directory " + name + " at path " + value + ". Referenced path is not a directory."); + } + + if (!Files.isReadable(value)) { + throw new ValidationException("Cannot read from directory " + name + " at path " + value + ". Please set read permissions."); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/DirectoryWritableValidator.java b/data-node/src/main/java/org/graylog/datanode/DirectoryWritableValidator.java new file mode 100644 index 000000000000..7585ca3aadb3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/DirectoryWritableValidator.java @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode; + +import com.github.joschi.jadconfig.ValidationException; +import com.github.joschi.jadconfig.Validator; +import com.github.joschi.jadconfig.validators.DirectoryPathWritableValidator; + +import java.nio.file.Files; +import java.nio.file.Path; + +/** + * The original validator doesn't tell if the problem is nonexistence of the directory, file instead of dir + * or permissions problem. This validator provides more specific exception with detailed message. + */ +public class DirectoryWritableValidator extends DirectoryPathWritableValidator { + @Override + public void validate(String name, Path value) throws ValidationException { + if (value == null) { + return; + } + + if (!Files.exists(value)) { + throw new ValidationException("Cannot write to directory " + name + " at path " + value + ". Directory doesn't exist. Please create the directory."); + } + + if (!Files.isDirectory(value)) { + throw new ValidationException("Cannot write to directory " + name + " at path " + value + ". Referenced path is not a directory."); + } + + if (!Files.isWritable(value)) { + if (Files.isReadable(value)) { + throw new ValidationException("Cannot write to directory " + name + " at path " + value + ". Directory is only readable. Please also set write permissions."); + } else { + throw new ValidationException("Cannot write to directory " + name + " at path " + value + ". Directory is neither readable nor writable. Please set read and write permissions."); + } + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/OpensearchDistribution.java b/data-node/src/main/java/org/graylog/datanode/OpensearchDistribution.java new file mode 100644 index 000000000000..954b4540d569 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/OpensearchDistribution.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc.
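For context, a minimal sketch of how the two directory validators above are typically attached to a jadconfig configuration bean. The bean, the field and property names, and the validators attribute of @Parameter are illustrative assumptions and not part of this change:

import com.github.joschi.jadconfig.Parameter;
import org.graylog.datanode.DirectoryReadableValidator;
import org.graylog.datanode.DirectoryWritableValidator;

import java.nio.file.Path;

// Hypothetical configuration bean, for illustration only; assumes jadconfig's
// @Parameter accepts a validators attribute as used elsewhere in Graylog configuration classes.
public class ExampleDirectoriesConfig {
    // A directory that only needs to be readable; violations surface as the detailed ValidationException above.
    @Parameter(value = "example_dist_dir", validators = DirectoryReadableValidator.class)
    private Path exampleDistDir;

    // A directory the data node must be able to write to.
    @Parameter(value = "example_data_dir", validators = DirectoryWritableValidator.class)
    private Path exampleDataDir;
}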
+ */ +package org.graylog.datanode; + +import org.graylog.datanode.configuration.OpensearchArchitecture; + +import javax.annotation.Nullable; +import java.nio.file.Path; +import java.nio.file.Paths; + +public record OpensearchDistribution(Path directory, String version, @Nullable String platform, + @Nullable OpensearchArchitecture architecture) { + + public OpensearchDistribution(Path path, String version) { + this(path, version, null, null); + } + + public Path getOpensearchBinDirPath() { + return directory.resolve("bin"); + } + + public Path getOpensearchExecutable() { + return getOpensearchBinDirPath().resolve("opensearch"); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/ConfigurationModule.java b/data-node/src/main/java/org/graylog/datanode/bindings/ConfigurationModule.java new file mode 100644 index 000000000000..5e04ef3eb73e --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/ConfigurationModule.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bindings; + +import com.google.inject.Binder; +import com.google.inject.Module; +import org.graylog.datanode.Configuration; + +import static java.util.Objects.requireNonNull; + +public class ConfigurationModule implements Module { + private final Configuration configuration; + + public ConfigurationModule(Configuration configuration) { + this.configuration = requireNonNull(configuration); + } + + @Override + public void configure(Binder binder) { + binder.bind(Configuration.class).toInstance(configuration); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/DatanodeConfigurationBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/DatanodeConfigurationBindings.java new file mode 100644 index 000000000000..345716df7f76 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/DatanodeConfigurationBindings.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bindings; + +import com.google.inject.AbstractModule; +import com.google.inject.TypeLiteral; +import org.graylog.datanode.OpensearchDistribution; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.configuration.DatanodeConfigurationProvider; +import org.graylog.datanode.configuration.OpensearchDistributionProvider; +import org.graylog.datanode.configuration.OpensearchKeystoreProvider; +import org.graylog.datanode.filesystem.index.indexreader.ShardStatsParser; +import org.graylog.datanode.filesystem.index.indexreader.ShardStatsParserImpl; +import org.graylog.datanode.filesystem.index.statefile.StateFileParser; +import org.graylog.datanode.filesystem.index.statefile.StateFileParserImpl; +import org.graylog.security.certutil.KeyStoreDto; +import org.graylog2.plugin.system.FilePersistedNodeIdProvider; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.security.JwtSecret; +import org.graylog2.security.JwtSecretProvider; + +import java.util.Map; + +public class DatanodeConfigurationBindings extends AbstractModule { + @Override + protected void configure() { + bind(NodeId.class).toProvider(FilePersistedNodeIdProvider.class).asEagerSingleton(); + bind(new TypeLiteral>() {}).toProvider(OpensearchKeystoreProvider.class); + bind(DatanodeConfiguration.class).toProvider(DatanodeConfigurationProvider.class); + bind(OpensearchDistribution.class).toProvider(OpensearchDistributionProvider.class); + bind(StateFileParser.class).to(StateFileParserImpl.class); + bind(ShardStatsParser.class).to(ShardStatsParserImpl.class); + bind(JwtSecret.class).toProvider(JwtSecretProvider.class).asEagerSingleton(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/DatanodeServerBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/DatanodeServerBindings.java new file mode 100644 index 000000000000..58387dcb3426 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/DatanodeServerBindings.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bindings; + +import com.google.inject.TypeLiteral; +import com.google.inject.multibindings.OptionalBinder; +import org.graylog.datanode.shared.system.activities.DataNodeActivityWriter; +import org.graylog2.cluster.ClusterConfigServiceImpl; +import org.graylog2.cluster.nodes.DataNodeClusterService; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.NodeService; +import org.graylog2.plugin.cluster.ClusterConfigService; +import org.graylog2.plugin.cluster.ClusterIdFactory; +import org.graylog2.plugin.cluster.RandomUUIDClusterIdFactory; +import org.graylog2.plugin.inject.Graylog2Module; +import org.graylog2.shared.system.activities.ActivityWriter; + +public class DatanodeServerBindings extends Graylog2Module { + + public DatanodeServerBindings() { + } + + @Override + protected void configure() { + bindInterfaces(); + bindSingletons(); + + bindDynamicFeatures(); + bindExceptionMappers(); + } + + + private void bindSingletons() { + bind(ClusterConfigService.class).to(ClusterConfigServiceImpl.class).asEagerSingleton(); + } + + private void bindInterfaces() { + bind(ActivityWriter.class).to(DataNodeActivityWriter.class); + OptionalBinder.newOptionalBinder(binder(), ClusterIdFactory.class).setDefault().to(RandomUUIDClusterIdFactory.class); + bind(new TypeLiteral>() {}).to(DataNodeClusterService.class); + } + + private void bindDynamicFeatures() { + jerseyDynamicFeatureBinder(); + } + + private void bindExceptionMappers() { + jerseyExceptionMapperBinder(); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/GenericBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/GenericBindings.java new file mode 100644 index 000000000000..53aea0988a93 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/GenericBindings.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bindings; + +import com.google.common.util.concurrent.ServiceManager; +import org.graylog.datanode.opensearch.CsrRequester; +import org.graylog.datanode.opensearch.CsrRequesterImpl; +import org.graylog.security.certutil.CaTruststore; +import org.graylog.security.certutil.CaTruststoreImpl; +import org.graylog2.plugin.inject.Graylog2Module; +import org.graylog2.security.CustomCAX509TrustManager; +import org.graylog2.shared.bindings.providers.ServiceManagerProvider; + +import javax.net.ssl.X509TrustManager; + +public class GenericBindings extends Graylog2Module { + private final boolean isMigrationCommand; + + public GenericBindings(boolean isMigrationCommand) { + this.isMigrationCommand = isMigrationCommand; + } + + @Override + protected void configure() { + bind(ServiceManager.class).toProvider(ServiceManagerProvider.class).asEagerSingleton(); + bind(X509TrustManager.class).to(CustomCAX509TrustManager.class).asEagerSingleton(); + bind(CaTruststore.class).to(CaTruststoreImpl.class).asEagerSingleton(); + bind(CsrRequester.class).to(CsrRequesterImpl.class).asEagerSingleton(); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/GenericInitializerBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/GenericInitializerBindings.java new file mode 100644 index 000000000000..3793ab25ce88 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/GenericInitializerBindings.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bindings; + +import com.google.common.util.concurrent.Service; +import com.google.inject.AbstractModule; +import com.google.inject.multibindings.Multibinder; +import org.graylog.datanode.initializers.PeriodicalsService; +import org.graylog.datanode.shutdown.GracefulShutdownService; +import org.graylog2.bootstrap.preflight.PreflightConfigService; +import org.graylog2.bootstrap.preflight.PreflightConfigServiceImpl; + +public class GenericInitializerBindings extends AbstractModule { + @Override + protected void configure() { + bind(PreflightConfigService.class).to(PreflightConfigServiceImpl.class); + + Multibinder serviceBinder = Multibinder.newSetBinder(binder(), Service.class); + serviceBinder.addBinding().to(PeriodicalsService.class); + serviceBinder.addBinding().to(GracefulShutdownService.class).asEagerSingleton(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/OpensearchProcessBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/OpensearchProcessBindings.java new file mode 100644 index 000000000000..f09ae986c598 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/OpensearchProcessBindings.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bindings; + +import com.google.common.util.concurrent.Service; +import com.google.inject.AbstractModule; +import com.google.inject.TypeLiteral; +import com.google.inject.multibindings.Multibinder; +import org.graylog.datanode.configuration.DatanodeTrustManagerProvider; +import org.graylog.datanode.configuration.OpensearchConfigurationService; +import org.graylog.datanode.configuration.variants.DatanodeKeystoreOpensearchCertificatesProvider; +import org.graylog.datanode.configuration.variants.LocalConfigurationCertificatesProvider; +import org.graylog.datanode.configuration.variants.NoOpensearchCertificatesProvider; +import org.graylog.datanode.configuration.variants.OpensearchCertificatesProvider; +import org.graylog.datanode.metrics.ConfigureMetricsIndexSettings; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.OpensearchProcessImpl; +import org.graylog.datanode.opensearch.OpensearchProcessService; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.opensearch.configuration.OpensearchUsableSpace; +import org.graylog.datanode.opensearch.configuration.OpensearchUsableSpaceProvider; +import org.graylog.datanode.opensearch.configuration.beans.impl.OpensearchClusterConfigurationBean; +import org.graylog.datanode.opensearch.configuration.beans.impl.OpensearchCommonConfigurationBean; +import org.graylog.datanode.opensearch.configuration.beans.impl.OpensearchDefaultConfigFilesBean; +import org.graylog.datanode.opensearch.configuration.beans.impl.OpensearchSecurityConfigurationBean; +import org.graylog.datanode.opensearch.configuration.beans.impl.SearchableSnapshotsConfigurationBean; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachineProvider; +import org.graylog.datanode.opensearch.statemachine.tracer.ClusterNodeStateTracer; +import org.graylog.datanode.opensearch.statemachine.tracer.OpensearchWatchdog; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracer; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTransitionLogger; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; + +public class OpensearchProcessBindings extends AbstractModule { + + @Override + protected void configure() { + + + Multibinder serviceBinder = Multibinder.newSetBinder(binder(), Service.class); + + bind(OpensearchProcess.class).to(OpensearchProcessImpl.class).asEagerSingleton(); + bind(OpensearchStateMachine.class).toProvider(OpensearchStateMachineProvider.class).asEagerSingleton(); + + bind(OpensearchUsableSpace.class).toProvider(OpensearchUsableSpaceProvider.class).asEagerSingleton(); + + //opensearch certificate providers + Multibinder opensearchCertificatesProviders = Multibinder.newSetBinder(binder(), OpensearchCertificatesProvider.class); + opensearchCertificatesProviders.addBinding().to(LocalConfigurationCertificatesProvider.class).asEagerSingleton(); + 
opensearchCertificatesProviders.addBinding().to(DatanodeKeystoreOpensearchCertificatesProvider.class).asEagerSingleton(); + opensearchCertificatesProviders.addBinding().to(NoOpensearchCertificatesProvider.class).asEagerSingleton(); + + + //opensearch configuration beans. The order of the beans is important here! + + Multibinder> opensearchConfigurationBeanMultibinder = Multibinder.newSetBinder(binder(), new TypeLiteral>() {}); + opensearchConfigurationBeanMultibinder.addBinding().to(OpensearchDefaultConfigFilesBean.class).asEagerSingleton(); + opensearchConfigurationBeanMultibinder.addBinding().to(OpensearchCommonConfigurationBean.class).asEagerSingleton(); + opensearchConfigurationBeanMultibinder.addBinding().to(OpensearchClusterConfigurationBean.class).asEagerSingleton(); + opensearchConfigurationBeanMultibinder.addBinding().to(SearchableSnapshotsConfigurationBean.class).asEagerSingleton(); + opensearchConfigurationBeanMultibinder.addBinding().to(OpensearchSecurityConfigurationBean.class).asEagerSingleton(); + + // this service both starts and provides the opensearch process + serviceBinder.addBinding().to(OpensearchConfigurationService.class).asEagerSingleton(); + serviceBinder.addBinding().to(OpensearchProcessService.class).asEagerSingleton(); + + bind(DatanodeTrustManagerProvider.class); + + // tracer + Multibinder tracerBinder = Multibinder.newSetBinder(binder(), StateMachineTracer.class); + tracerBinder.addBinding().to(ClusterNodeStateTracer.class).asEagerSingleton(); + tracerBinder.addBinding().to(OpensearchWatchdog.class).asEagerSingleton(); + tracerBinder.addBinding().to(StateMachineTransitionLogger.class).asEagerSingleton(); + tracerBinder.addBinding().to(ConfigureMetricsIndexSettings.class).asEagerSingleton(); + + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/PeriodicalBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/PeriodicalBindings.java new file mode 100644 index 000000000000..f0ff3bed4e6c --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/PeriodicalBindings.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bindings; + +import com.google.inject.AbstractModule; +import com.google.inject.multibindings.Multibinder; +import org.graylog.datanode.bootstrap.preflight.DataNodeCertRenewalPeriodical; +import org.graylog.datanode.bootstrap.preflight.DataNodeConfigurationPeriodical; +import org.graylog.datanode.periodicals.MetricsCollector; +import org.graylog.datanode.periodicals.NodePingPeriodical; +import org.graylog.datanode.periodicals.OpensearchNodeHeartbeat; +import org.graylog2.events.ClusterEventCleanupPeriodical; +import org.graylog2.events.ClusterEventPeriodical; +import org.graylog2.plugin.periodical.Periodical; + +public class PeriodicalBindings extends AbstractModule { + @Override + protected void configure() { + Multibinder periodicalBinder = Multibinder.newSetBinder(binder(), Periodical.class); + periodicalBinder.addBinding().to(ClusterEventPeriodical.class); + periodicalBinder.addBinding().to(ClusterEventCleanupPeriodical.class); + periodicalBinder.addBinding().to(OpensearchNodeHeartbeat.class); +// periodicalBinder.addBinding().to(UserSessionTerminationPeriodical.class); + periodicalBinder.addBinding().to(NodePingPeriodical.class); + periodicalBinder.addBinding().to(DataNodeConfigurationPeriodical.class); + periodicalBinder.addBinding().to(DataNodeCertRenewalPeriodical.class); + periodicalBinder.addBinding().to(MetricsCollector.class); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/PreflightChecksBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/PreflightChecksBindings.java new file mode 100644 index 000000000000..0f47e8502069 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/PreflightChecksBindings.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bindings; + +import com.google.inject.AbstractModule; +import com.google.inject.multibindings.MapBinder; +import org.graylog.datanode.bootstrap.preflight.DatanodeDirectoriesLockfileCheck; +import org.graylog.datanode.bootstrap.preflight.DatanodeDnsPreflightCheck; +import org.graylog.datanode.bootstrap.preflight.DatanodeKeystoreCheck; +import org.graylog.datanode.bootstrap.preflight.OpenSearchPreconditionsCheck; +import org.graylog.datanode.bootstrap.preflight.OpensearchBinPreflightCheck; +import org.graylog.datanode.bootstrap.preflight.OpensearchDataDirCompatibilityCheck; +import org.graylog.datanode.opensearch.CsrRequester; +import org.graylog.datanode.opensearch.CsrRequesterImpl; +import org.graylog2.bindings.providers.MongoConnectionProvider; +import org.graylog2.bootstrap.preflight.MongoDBPreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.cluster.certificates.CertificateExchange; +import org.graylog2.cluster.certificates.CertificateExchangeImpl; +import org.graylog2.database.MongoConnection; + +public class PreflightChecksBindings extends AbstractModule { + + + @Override + protected void configure() { + bind(CsrRequester.class).to(CsrRequesterImpl.class).asEagerSingleton(); + bind(CertificateExchange.class).to(CertificateExchangeImpl.class); + + addPreflightCheck(MongoDBPreflightCheck.class); + addPreflightCheck(DatanodeDnsPreflightCheck.class); + addPreflightCheck(OpensearchBinPreflightCheck.class); + addPreflightCheck(DatanodeDirectoriesLockfileCheck.class); + addPreflightCheck(OpenSearchPreconditionsCheck.class); + addPreflightCheck(OpensearchDataDirCompatibilityCheck.class); + + // Mongodb is needed for legacy datanode storage, where we want to extract the certificate chain from + // mongodb and store it in local keystore + bind(MongoConnection.class).toProvider(MongoConnectionProvider.class); + addPreflightCheck(DatanodeKeystoreCheck.class); + } + + + protected void addPreflightCheck(Class preflightCheck) { + preflightChecksBinder().addBinding(preflightCheck.getCanonicalName()).to(preflightCheck); + } + + protected MapBinder preflightChecksBinder() { + return MapBinder.newMapBinder(binder(), String.class, PreflightCheck.class); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bindings/SchedulerBindings.java b/data-node/src/main/java/org/graylog/datanode/bindings/SchedulerBindings.java new file mode 100644 index 000000000000..02e1365570a0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bindings/SchedulerBindings.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bindings; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.inject.AbstractModule; +import com.google.inject.name.Names; +import org.graylog2.periodical.Periodicals; +import org.graylog2.plugin.Tools; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +public class SchedulerBindings extends AbstractModule { + private static final Logger LOG = LoggerFactory.getLogger(SchedulerBindings.class); + private static final int SCHEDULED_THREADS_POOL_SIZE = 30; + + @Override + protected void configure() { + // TODO Add instrumentation to ExecutorService and ThreadFactory + final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(SCHEDULED_THREADS_POOL_SIZE, + new ThreadFactoryBuilder() + .setNameFormat("scheduled-%d") + .setDaemon(false) + .setUncaughtExceptionHandler(new Tools.LogUncaughtExceptionHandler(LOG)) + .build() + ); + + bind(ScheduledExecutorService.class).annotatedWith(Names.named("scheduler")).toInstance(scheduler); + + // TODO Add instrumentation to ExecutorService and ThreadFactory + final ScheduledExecutorService daemonScheduler = Executors.newScheduledThreadPool(SCHEDULED_THREADS_POOL_SIZE, + new ThreadFactoryBuilder() + .setNameFormat("scheduled-daemon-%d") + .setDaemon(true) + .setUncaughtExceptionHandler(new Tools.LogUncaughtExceptionHandler(LOG)) + .build() + ); + + bind(ScheduledExecutorService.class).annotatedWith(Names.named("daemonScheduler")).toInstance(daemonScheduler); + bind(Periodicals.class).toInstance(new Periodicals(scheduler, daemonScheduler)); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/DatanodeBootstrap.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/DatanodeBootstrap.java new file mode 100644 index 000000000000..d12e34248940 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/DatanodeBootstrap.java @@ -0,0 +1,179 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
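As a usage sketch for the two executors bound above in SchedulerBindings: consumers obtain them through Guice's @Named injection. The class below is hypothetical and only illustrates the wiring:

import com.google.inject.Inject;
import com.google.inject.name.Named;

import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Hypothetical consumer, not part of this changeset.
public class ExampleScheduledTask {
    private final ScheduledExecutorService daemonScheduler;

    @Inject
    public ExampleScheduledTask(@Named("daemonScheduler") ScheduledExecutorService daemonScheduler) {
        this.daemonScheduler = daemonScheduler;
    }

    public void start() {
        // Runs on one of the "scheduled-daemon-%d" threads created by SchedulerBindings.
        daemonScheduler.scheduleAtFixedRate(() -> {
            // periodic work goes here
        }, 0, 30, TimeUnit.SECONDS);
    }
}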
+ */ +package org.graylog.datanode.bootstrap; + +import com.google.common.util.concurrent.ServiceManager; +import com.google.inject.Binder; +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.Module; +import com.google.inject.ProvisionException; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.bindings.ConfigurationModule; +import org.graylog.datanode.bindings.DatanodeConfigurationBindings; +import org.graylog.datanode.bindings.GenericBindings; +import org.graylog.datanode.bindings.GenericInitializerBindings; +import org.graylog.datanode.bindings.OpensearchProcessBindings; +import org.graylog.datanode.bindings.PreflightChecksBindings; +import org.graylog.datanode.bootstrap.plugin.DatanodePluginLoader; +import org.graylog.datanode.bootstrap.preflight.PreflightClusterConfigurationModule; +import org.graylog2.bindings.NamedConfigParametersOverrideModule; +import org.graylog2.bootstrap.preflight.PreflightCheckService; +import org.graylog2.commands.AbstractNodeCommand; +import org.graylog2.plugin.Plugin; +import org.graylog2.plugin.PluginLoaderConfig; +import org.graylog2.plugin.Tools; +import org.graylog2.shared.bindings.IsDevelopmentBindings; +import org.graylog2.shared.plugins.ChainingClassLoader; +import org.graylog2.shared.plugins.PluginLoader; +import org.graylog2.shared.system.activities.Activity; +import org.graylog2.shared.system.activities.ActivityWriter; +import org.jsoftbiz.utils.OS; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; + +public abstract class DatanodeBootstrap extends AbstractNodeCommand { + private static final Logger LOG = LoggerFactory.getLogger(DatanodeBootstrap.class); + protected Configuration configuration; + + protected DatanodeBootstrap(String commandName, Configuration configuration) { + super(commandName, configuration); + this.commandName = commandName; + this.configuration = configuration; + } + + protected abstract void startNodeRegistration(Injector injector); + + @Override + protected void beforeInjectorCreation(Set plugins) { + runPreFlightChecks(plugins); + } + + private void runPreFlightChecks(Set plugins) { + if (configuration.getSkipPreflightChecks()) { + LOG.info("Skipping preflight checks"); + return; + } + + final List preflightCheckModules = plugins.stream().map(Plugin::preflightCheckModules) + .flatMap(Collection::stream).collect(Collectors.toList()); + + getPreflightInjector(preflightCheckModules).getInstance(PreflightCheckService.class).runChecks(); + } + + private Injector getPreflightInjector(List preflightCheckModules) { + return Guice.createInjector( + new IsDevelopmentBindings(), + new PreflightClusterConfigurationModule(chainingClassLoader), + new NamedConfigParametersOverrideModule(jadConfig.getConfigurationBeans()), + new ConfigurationModule(configuration), + new PreflightChecksBindings(), + new DatanodeConfigurationBindings(), + new Module() { + @Override + public void configure(Binder binder) { + preflightCheckModules.forEach(binder::install); + } + }); + } + + @Override + protected void startCommand() { + final String systemInformation = Tools.getSystemInformation(); + + final OS os = OS.getOs(); + + LOG.info("Graylog Data Node {} starting up (command: {})", version, commandName); + LOG.info("JRE: {}", systemInformation); + LOG.info("Deployment: {}", 
configuration.getInstallationSource()); + LOG.info("OS: {}", os.getPlatformName()); + LOG.info("Arch: {}", os.getArch()); + + startNodeRegistration(injector); + + final ActivityWriter activityWriter; + final ServiceManager serviceManager; + try { + activityWriter = injector.getInstance(ActivityWriter.class); + serviceManager = injector.getInstance(ServiceManager.class); + } catch (ProvisionException e) { + LOG.error("Guice error", e); + annotateProvisionException(e); + System.exit(-1); + return; + } catch (Exception e) { + LOG.error("Unexpected exception", e); + System.exit(-1); + return; + } + + Runtime.getRuntime().addShutdownHook(new Thread(injector.getInstance(shutdownHook()))); + + // Start services. + try { + serviceManager.startAsync().awaitHealthy(); + } catch (Exception e) { + try { + serviceManager.stopAsync().awaitStopped(configuration.getShutdownTimeout(), TimeUnit.MILLISECONDS); + } catch (TimeoutException timeoutException) { + LOG.error("Unable to shutdown properly on time. {}", serviceManager.servicesByState()); + } + LOG.error("Graylog DataNode startup failed. Exiting. Exception was:", e); + System.exit(-1); + } + LOG.info("Services started, startup times in ms: {}", serviceManager.startupTimes()); + + activityWriter.write(new Activity("Started up.", Main.class)); + LOG.info("Graylog DataNode {} up and running.", commandName); + + // Block forever. + try { + Thread.currentThread().join(); + } catch (InterruptedException e) { + } + } + + @Override + protected List getSharedBindingsModules() { + final List result = super.getSharedBindingsModules(); + result.add(new GenericBindings(isMigrationCommand())); + result.add(new GenericInitializerBindings()); + result.add(new OpensearchProcessBindings()); + result.add(new DatanodeConfigurationBindings()); + + return result; + } + + protected void annotateProvisionException(ProvisionException e) { + annotateInjectorExceptions(e.getErrorMessages()); + throw e; + } + + protected abstract Class shutdownHook(); + + @Override + protected PluginLoader getPluginLoader(PluginLoaderConfig pluginLoaderConfig, ChainingClassLoader classLoader) { + return new DatanodePluginLoader(pluginLoaderConfig.getPluginDir().toFile(), classLoader); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/Main.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/Main.java new file mode 100644 index 000000000000..67b1156808cd --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/Main.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bootstrap; + +import com.github.rvesse.airline.Cli; +import com.github.rvesse.airline.builder.CliBuilder; +import com.google.common.collect.ImmutableSet; +import org.graylog.datanode.bootstrap.commands.CliCommandHelp; +import org.graylog.datanode.bootstrap.commands.ShowVersion; +import org.graylog.security.certutil.CertutilCa; +import org.graylog.security.certutil.CertutilCert; +import org.graylog.security.certutil.CertutilCsr; +import org.graylog.security.certutil.CertutilCsrSign; +import org.graylog.security.certutil.CertutilHttp; +import org.graylog.security.certutil.CertutilTruststore; +import org.graylog2.bootstrap.CliCommand; +import org.graylog2.bootstrap.CliCommandsProvider; + +import java.util.ServiceLoader; + +public class Main { + public static void main(String[] args) { + final CliBuilder builder = Cli.builder("graylog") + .withDescription("Open source, centralized log management") + .withDefaultCommand(CliCommandHelp.class) + .withCommands(ImmutableSet.of( + CertutilCa.class, + CertutilCert.class, + CertutilHttp.class, + CertutilCsr.class, + CertutilCsrSign.class, + CertutilTruststore.class, + ShowVersion.class, + CliCommandHelp.class)); + + // add rest from classpath + final ServiceLoader commandsProviders = ServiceLoader.load(CliCommandsProvider.class); + for (CliCommandsProvider provider : commandsProviders) { + provider.addTopLevelCommandsOrGroups(builder); + } + + final Cli cli = builder.build(); + final Runnable command = cli.parse(args); + + command.run(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/commands/CliCommandHelp.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/commands/CliCommandHelp.java new file mode 100644 index 000000000000..581c32dade83 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/commands/CliCommandHelp.java @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.commands; + +import com.github.rvesse.airline.help.Help; +import org.graylog2.bootstrap.CliCommand; + +/* shallow subclass to make it implement CliCommand */ +public class CliCommandHelp extends Help implements CliCommand { +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/commands/ShowVersion.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/commands/ShowVersion.java new file mode 100644 index 000000000000..f01855770548 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/commands/ShowVersion.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.commands; + +import com.github.rvesse.airline.annotations.Command; +import org.graylog2.bootstrap.CliCommand; +import org.graylog2.plugin.Tools; +import org.graylog2.plugin.Version; + +@Command(name = "version", description = "Show the Graylog and JVM versions") +public class ShowVersion implements CliCommand { + private final Version version = Version.CURRENT_CLASSPATH; + + @Override + public void run() { + System.out.println("Graylog " + version); + System.out.println("JRE: " + Tools.getSystemInformation()); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePlugin.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePlugin.java new file mode 100644 index 000000000000..3307210f7731 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePlugin.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.plugin; + +import org.graylog2.plugin.Plugin; + +/** + * Service loader interface for data node specific plugins + */ +public interface DatanodePlugin extends Plugin { +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePluginBootstrapConfig.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePluginBootstrapConfig.java new file mode 100644 index 000000000000..710e315dc29a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePluginBootstrapConfig.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.plugin; + +import org.graylog2.plugin.PluginBootstrapConfig; + +/** + * A configuration bean to be processed by {@link com.github.joschi.jadconfig.JadConfig} for data node plugins. + *
* <p>
+ * Configurations implementing this interface should be loaded via the {@link java.util.ServiceLoader} mechanism. See + * {@link DatanodePluginLoader#loadPluginBootstrapConfigs()}. + */ +public interface DatanodePluginBootstrapConfig extends PluginBootstrapConfig { +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePluginLoader.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePluginLoader.java new file mode 100644 index 000000000000..c081f8e395d2 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/plugin/DatanodePluginLoader.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.plugin; + +import com.google.common.collect.ImmutableSet; +import org.graylog2.plugin.Plugin; +import org.graylog2.plugin.PluginBootstrapConfig; +import org.graylog2.shared.plugins.ChainingClassLoader; +import org.graylog2.shared.plugins.PluginLoader; + +import java.io.File; +import java.util.ServiceLoader; +import java.util.Set; + +public class DatanodePluginLoader extends PluginLoader { + + public DatanodePluginLoader(File pluginDir, ChainingClassLoader classLoader) { + super(pluginDir, classLoader); + } + + @Override + @SuppressWarnings("unchecked") + public Set loadPluginBootstrapConfigs() { + return ImmutableSet.copyOf( + (ServiceLoader) (ServiceLoader) + ServiceLoader.load(DatanodePluginBootstrapConfig.class, classLoader) + ); + } + + @Override + @SuppressWarnings("unchecked") + protected Iterable loadClassPathPlugins() { + return (ServiceLoader) (ServiceLoader) + ServiceLoader.load(DatanodePlugin.class); + } + + @Override + @SuppressWarnings("unchecked") + protected Iterable loadJarPlugins() { + return ImmutableSet.copyOf( + (ServiceLoader) (ServiceLoader) + ServiceLoader.load(DatanodePlugin.class, classLoader) + ); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DataNodeCertRenewalPeriodical.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DataNodeCertRenewalPeriodical.java new file mode 100644 index 000000000000..c247ab7f3e53 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DataNodeCertRenewalPeriodical.java @@ -0,0 +1,159 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
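To make the ServiceLoader mechanism referenced in the DatanodePluginBootstrapConfig javadoc concrete: a plugin jar lists its implementation class in a provider-configuration resource, and a lookup like the one below then discovers it. The plugin class name is hypothetical:

// Provider-configuration file inside the plugin jar (one fully qualified class name per line):
//   META-INF/services/org.graylog.datanode.bootstrap.plugin.DatanodePluginBootstrapConfig
//   containing e.g. (hypothetical): com.example.graylog.ExamplePluginBootstrapConfig
import java.util.ServiceLoader;

class BootstrapConfigDiscoveryExample {
    static void discover(ClassLoader pluginClassLoader) {
        ServiceLoader
                .load(org.graylog.datanode.bootstrap.plugin.DatanodePluginBootstrapConfig.class, pluginClassLoader)
                .forEach(config -> System.out.println("Discovered bootstrap config: " + config.getClass().getName()));
    }
}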
+ */ +package org.graylog.datanode.bootstrap.preflight; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.opensearch.CsrRequester; +import org.graylog2.bootstrap.preflight.PreflightConfigResult; +import org.graylog2.bootstrap.preflight.PreflightConfigService; +import org.graylog2.plugin.certificates.RenewalPolicy; +import org.graylog2.plugin.cluster.ClusterConfigService; +import org.graylog2.plugin.periodical.Periodical; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.time.Instant; +import java.util.Date; +import java.util.Optional; +import java.util.function.Supplier; + +@Singleton +public class DataNodeCertRenewalPeriodical extends Periodical { + private static final Logger LOG = LoggerFactory.getLogger(DataNodeCertRenewalPeriodical.class); + public static final Duration PERIODICAL_DURATION = Duration.ofSeconds(2); + public static final Duration CSR_TRIGGER_PERIOD_LIMIT = Duration.ofMinutes(5); + + private final DatanodeKeystore datanodeKeystore; + private final Supplier renewalPolicySupplier; + + private final CsrRequester csrRequester; + + private final Supplier isServerInPreflightMode; + + private Instant lastCsrRequest; + + @Inject + public DataNodeCertRenewalPeriodical(DatanodeKeystore datanodeKeystore, ClusterConfigService clusterConfigService, CsrRequester csrRequester, PreflightConfigService preflightConfigService) { + this(datanodeKeystore, () -> clusterConfigService.get(RenewalPolicy.class), csrRequester, () -> isInPreflight(preflightConfigService)); + } + + protected DataNodeCertRenewalPeriodical(DatanodeKeystore datanodeKeystore, Supplier renewalPolicySupplier, CsrRequester csrRequester, Supplier isServerInPreflightMode) { + this.datanodeKeystore = datanodeKeystore; + this.renewalPolicySupplier = renewalPolicySupplier; + this.csrRequester = csrRequester; + this.isServerInPreflightMode = isServerInPreflightMode; + } + + @Override + public void doRun() { + if (isServerInPreflightMode.get()) { + // we don't want to automatically trigger CSRs during preflight, don't run it if the preflight is still not finished or skipped + LOG.debug("Datanode still in preflight mode, skipping cert renewal task"); + return; + } + + // always check if there are any certificates that we can accept + getRenewalPolicy() + .filter(this::needsNewCertificate) + .ifPresent(renewalPolicy -> { + switch (renewalPolicy.mode()) { + case AUTOMATIC -> automaticRenewal(); + case MANUAL -> manualRenewal(); + } + }); + } + + private static boolean isInPreflight(PreflightConfigService preflightConfigService) { + return preflightConfigService.getPreflightConfigResult() != PreflightConfigResult.FINISHED; + } + + private void manualRenewal() { + LOG.debug("Manual renewal, ignoring on the datanode side for now"); + } + + private void automaticRenewal() { + final Instant now = Instant.now(); + if (lastCsrRequest == null || now.minus(CSR_TRIGGER_PERIOD_LIMIT).isAfter(lastCsrRequest)) { + lastCsrRequest = now; + csrRequester.triggerCertificateSigningRequest(); + } + } + + private boolean needsNewCertificate(RenewalPolicy renewalPolicy) { + final Date expiration = datanodeKeystore.getCertificateExpiration(); + return expiration == null || expiresSoon(expiration, renewalPolicy); + } + + private boolean expiresSoon(Date expiration, RenewalPolicy renewalPolicy) { + Duration threshold = renewalPolicy.getRenewalThreshold(); + final Instant renewalMoment = 
expiration.toInstant() + .minus(threshold) + .minus(PERIODICAL_DURATION); + final boolean expiresSoon = Instant.now().isAfter(renewalMoment); + if (expiresSoon) { + LOG.info("Datanode certificate will be renewed now, expiring soon (" + expiration + ")"); + } + return expiresSoon; + } + + private Optional getRenewalPolicy() { + return Optional.ofNullable(renewalPolicySupplier.get()); + } + + @Override + protected Logger getLogger() { + return LOG; + } + + @Override + public boolean runsForever() { + return false; + } + + @Override + public boolean stopOnGracefulShutdown() { + return true; + } + + @Override + public boolean leaderOnly() { + return false; + } + + @Override + public boolean startOnThisNode() { + return true; + } + + @Override + public boolean isDaemon() { + return true; + } + + @Override + public int getInitialDelaySeconds() { + return 0; + } + + @Override + public int getPeriodSeconds() { + return (int) PERIODICAL_DURATION.toSeconds(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DataNodeConfigurationPeriodical.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DataNodeConfigurationPeriodical.java new file mode 100644 index 000000000000..08cc4520e9c3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DataNodeConfigurationPeriodical.java @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
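A worked example of the renewal arithmetic in DataNodeCertRenewalPeriodical above, using made-up dates; it mirrors expiration minus the renewal threshold minus the 2-second periodical interval:

import java.time.Duration;
import java.time.Instant;

class RenewalMomentExample {
    public static void main(String[] args) {
        // Illustrative values only: a certificate expiring 2024-07-31T00:00:00Z with a 30-day threshold
        // crosses its renewal moment at 2024-07-01T00:00:00Z (minus the 2-second period), after which
        // the next periodical run would trigger a certificate signing request.
        Instant expiration = Instant.parse("2024-07-31T00:00:00Z");
        Duration threshold = Duration.ofDays(30);
        Instant renewalMoment = expiration.minus(threshold).minus(Duration.ofSeconds(2));
        System.out.println("Renew from " + renewalMoment + "; expiring soon now? " + Instant.now().isAfter(renewalMoment));
    }
}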
+ */ +package org.graylog.datanode.bootstrap.preflight; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import org.graylog2.plugin.periodical.Periodical; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Singleton +public class DataNodeConfigurationPeriodical extends Periodical { + private static final Logger LOG = LoggerFactory.getLogger(DataNodeConfigurationPeriodical.class); + private final DatanodeCertReceiver datanodeCertReceiver; + + @Inject + public DataNodeConfigurationPeriodical(DatanodeCertReceiver datanodeCertReceiver) { + this.datanodeCertReceiver = datanodeCertReceiver; + } + + @Override + public void doRun() { + // always check if there are any certificates that we can accept + datanodeCertReceiver.pollCertificate(); + } + + @Override + protected Logger getLogger() { + return LOG; + } + + @Override + public boolean runsForever() { + return false; + } + + @Override + public boolean stopOnGracefulShutdown() { + return true; + } + + @Override + public boolean leaderOnly() { + return false; + } + + @Override + public boolean startOnThisNode() { + return true; + } + + @Override + public boolean isDaemon() { + return true; + } + + @Override + public int getInitialDelaySeconds() { + return 0; + } + + @Override + public int getPeriodSeconds() { + return 2; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeCertReceiver.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeCertReceiver.java new file mode 100644 index 000000000000..06552767dff3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeCertReceiver.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bootstrap.preflight; + +import jakarta.inject.Inject; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.DatanodeKeystoreException; +import org.graylog.security.certutil.cert.CertificateChain; +import org.graylog2.cluster.certificates.CertificateExchange; +import org.graylog2.plugin.system.NodeId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DatanodeCertReceiver { + + private static final Logger LOG = LoggerFactory.getLogger(DatanodeCertReceiver.class); + + private final NodeId nodeId; + + private final CertificateExchange certificateExchange; + private final DatanodeKeystore datanodeKeystore; + + @Inject + public DatanodeCertReceiver(NodeId nodeId, CertificateExchange certificateExchange, DatanodeKeystore datanodeKeystore) { + this.nodeId = nodeId; + this.certificateExchange = certificateExchange; + this.datanodeKeystore = datanodeKeystore; + } + + public void pollCertificate() { + // always check if there are any certificates that we can accept + certificateExchange.pollCertificate(nodeId.getNodeId(), this::processCertificateChain); + } + + private void processCertificateChain(CertificateChain certificateChain) { + try { + LOG.info("Received new datanode certificate, updating datanode keystore"); + datanodeKeystore.replaceCertificatesInKeystore(certificateChain); + } catch (DatanodeKeystoreException e) { + throw new RuntimeException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeDirectoriesLockfileCheck.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeDirectoriesLockfileCheck.java new file mode 100644 index 000000000000..2aa537fd8090 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeDirectoriesLockfileCheck.java @@ -0,0 +1,131 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.google.common.io.CharStreams; +import jakarta.inject.Inject; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.graylog2.plugin.system.NodeId; + +import java.io.IOException; +import java.io.Reader; +import java.io.Writer; +import java.nio.channels.Channels; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Objects; +import java.util.Optional; + +/** + * To prevent two or more datanodes using the same directories, we are writing a datanode.lock file into the dirs. + * The content of the lock file is the nodeid of the datanode.
During startup we are checking that the directory + * has either no lock (and then we create one) or if it has a lock, then we verify its content against current nodeid. + */ +public class DatanodeDirectoriesLockfileCheck implements PreflightCheck { + + protected static final Path DATANODE_LOCKFILE = Path.of("datanode.lock"); + private final DatanodeDirectories directories; + private final String nodeId; + + @Inject + public DatanodeDirectoriesLockfileCheck(NodeId nodeId, DatanodeConfiguration datanodeConfiguration) { + this(nodeId.getNodeId(), datanodeConfiguration.datanodeDirectories()); + } + + public DatanodeDirectoriesLockfileCheck(String nodeId, DatanodeDirectories directories) { + this.directories = directories; + this.nodeId = nodeId; + } + + @Override + public void runCheck() throws PreflightCheckException { + checkDatanodeLock(directories.getConfigurationTargetDir()); + checkDatanodeLock(directories.getLogsTargetDir()); + } + + public void checkDatanodeLock(Path dir) { + final Path lockfile = dir.resolve(DATANODE_LOCKFILE); + try (FileChannel channel = FileChannel.open(lockfile, StandardOpenOption.READ, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) { + doCheckLockFile(channel, dir); + } catch (IOException e) { + throw new DatanodeLockFileException("Failed to open channel to lock file " + lockfile.toAbsolutePath(), e); + } + } + + private void doCheckLockFile(FileChannel channel, Path dir) { + FileLock lock = null; + try { + lock = channel.lock(); + // now we are the only process that can access the lock file. + verifyOrCreateLockFile(channel, dir); + } catch (IOException e) { + throw new DatanodeLockFileException("Failed to obtain lock", e); + } finally { + releaseLock(lock); + } + } + + private void verifyOrCreateLockFile(FileChannel channel, Path dir) { + readChannel(channel).ifPresentOrElse( + lockedForID -> verifyLockFileContent(lockedForID, dir), + () -> writeLockFile(channel) + ); + } + + private void releaseLock(FileLock lock) { + try { + if (lock != null) { + lock.release(); + } + } catch (IOException e) { + throw new DatanodeLockFileException("Failed to release lock file", e); + } + } + + private void verifyLockFileContent(String lockedForID, Path dir) { + if (!Objects.equals(lockedForID, nodeId)) { + throw new DatanodeLockFileException("Directory " + dir + " locked for datanode " + lockedForID + ", access with datanode " + nodeId + " rejected. Please check your configuration and make sure that there is only one datanode instance using this directory."); + } + } + + private void writeLockFile(FileChannel channel) { + try { + // do not close the writer, otherwise it will close the underlying channel. We do that explicitly elsewhere. + final Writer writer = Channels.newWriter(channel, StandardCharsets.UTF_8); + writer.write(nodeId); + writer.flush(); + } catch (IOException e) { + throw new DatanodeLockFileException("Failed to write node ID to the lock file", e); + } + } + + private Optional readChannel(FileChannel channel) { + try { + // do not close the reader, otherwise it will close the underlying channel. We do that explicitly elsewhere. 
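+            // an empty or blank lock file is treated as "no lock yet", so the caller will fall back to writing the current node ID into it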
+ final Reader reader = Channels.newReader(channel, StandardCharsets.UTF_8); + return Optional.of(CharStreams.toString(reader)).filter(v -> !v.isBlank()).map(String::trim); + } catch (IOException e) { + throw new DatanodeLockFileException("Failed to read content of lock file", e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeDnsPreflightCheck.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeDnsPreflightCheck.java new file mode 100644 index 000000000000..f9d2dcc83ca5 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeDnsPreflightCheck.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.List; +import java.util.stream.Stream; + +public class DatanodeDnsPreflightCheck implements PreflightCheck { + + private static final Logger LOG = LoggerFactory.getLogger(DatanodeDnsPreflightCheck.class); + + private final String configuredHostname; + + @Inject + public DatanodeDnsPreflightCheck(Configuration datanodeConfiguration) { + configuredHostname = datanodeConfiguration.getHostname(); + } + + @Override + public void runCheck() throws PreflightCheckException { + try { + final InetAddress[] addresses = InetAddress.getAllByName(configuredHostname); + final List ips = Stream.of(addresses).map(InetAddress::getHostAddress).toList(); + LOG.debug("Datanode host {} is available on {} addresses", configuredHostname, ips); + } catch (UnknownHostException e) { + throw new PreflightCheckException("Configured hostname " + configuredHostname + " is not bound to any address! Please configure your DNS so the hostname points to this machine"); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeKeystoreCheck.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeKeystoreCheck.java new file mode 100644 index 000000000000..5c89c3ca5f12 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeKeystoreCheck.java @@ -0,0 +1,157 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.github.rholder.retry.Attempt; +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.RetryListener; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import jakarta.inject.Inject; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.DatanodeKeystoreException; +import org.graylog.datanode.opensearch.CsrRequester; +import org.graylog.security.certutil.CertRequest; +import org.graylog.security.certutil.CertificateGenerator; +import org.graylog.security.certutil.KeyPair; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.graylog2.plugin.certificates.RenewalPolicy; +import org.graylog2.plugin.cluster.ClusterConfigService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.security.KeyStore; +import java.time.Duration; +import java.util.Date; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +/** + * This check verifies that each datanode has a private key configured. It may not be needed right now, but the moment + * we start provisioning and create certificate signing requests, we need to be sure that there is a private key + * available. + *
+ * Additionally, this check is able to restore existing keystore persisted in mongodb. It may hold valid certificate + * that we want to reuse. Otherwise, the node would undergo signing again. + */ +public class DatanodeKeystoreCheck implements PreflightCheck { + + private static final Logger LOG = LoggerFactory.getLogger(DatanodeKeystoreCheck.class); + public static final Duration DEFAULT_SELFSIGNED_CERT_VALIDITY = Duration.ofDays(99 * 365); + + private final DatanodeKeystore datanodeKeystore; + private final LegacyDatanodeKeystoreProvider legacyDatanodeKeystoreProvider; + private final CsrRequester csrRequester; + private final DatanodeCertReceiver datanodeCertReceiver; + + private final ClusterConfigService clusterConfigService; + + @Inject + public DatanodeKeystoreCheck( + DatanodeKeystore datanodeKeystore, + LegacyDatanodeKeystoreProvider legacyDatanodeKeystoreProvider, + CsrRequester csrRequester, + DatanodeCertReceiver datanodeCertReceiver, ClusterConfigService clusterConfigService + + ) { + this.datanodeKeystore = datanodeKeystore; + this.legacyDatanodeKeystoreProvider = legacyDatanodeKeystoreProvider; + this.csrRequester = csrRequester; + this.datanodeCertReceiver = datanodeCertReceiver; + this.clusterConfigService = clusterConfigService; + } + + @Override + public void runCheck() throws PreflightCheckException { + if (!datanodeKeystore.exists()) { + LOG.info("Creating keystore for this data node"); + try { + final Optional legacyKeystore = legacyDatanodeKeystoreProvider.get(); + if (legacyKeystore.isPresent()) { // remove this branch latest with 7.0 release + LOG.info("Legacy keystore discovered, converting to local file"); + datanodeKeystore.create(legacyKeystore.get()); + legacyDatanodeKeystoreProvider.deleteLocalPrivateKey(); + } else { + datanodeKeystore.create(generateKeyPair()); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } else { + LOG.debug("Private key for this data node already exists, skipping creation."); + checkCertificateRenewal(); + } + } + + private void checkCertificateRenewal() { + try { + if (isAutomaticRenewal() && datanodeKeystore.hasSignedCertificate()) { + final Date expiration = datanodeKeystore.getCertificateExpiration(); + final Date now = new Date(); + final boolean expired = now.after(expiration); + if (expired) { + LOG.info("Datanode certificate expired on {}. 
Requesting and awaiting new certificate", expiration); + csrRequester.triggerCertificateSigningRequest(); + waitForCertificateRenewal(); + } + } + } catch (DatanodeKeystoreException e) { + throw new RuntimeException(e); + } + } + + private boolean isAutomaticRenewal() { + return Optional.ofNullable(this.clusterConfigService.get(RenewalPolicy.class)) + .map(RenewalPolicy::mode) + .filter(RenewalPolicy.Mode.AUTOMATIC::equals) + .isPresent(); + } + + private void waitForCertificateRenewal() { + try { + RetryerBuilder.newBuilder() + .retryIfResult(expiration -> { + final Date now = new Date(); + return now.after(expiration); + }) + .withStopStrategy(StopStrategies.neverStop()) + .withWaitStrategy(WaitStrategies.fixedWait(5, TimeUnit.SECONDS)) + .withRetryListener(new RetryListener() { + @Override + public void onRetry(Attempt attempt) { + LOG.info("Waiting for datanode certificate renewal, retry #{}", attempt.getAttemptNumber()); + datanodeCertReceiver.pollCertificate(); + } + }) + .build() + .call(datanodeKeystore::getCertificateExpiration); + } catch (ExecutionException | RetryException e) { + throw new RuntimeException(e); + } + } + + private static KeyPair generateKeyPair() throws Exception { + final CertRequest certRequest = CertRequest.selfSigned(DatanodeKeystore.DATANODE_KEY_ALIAS) + .isCA(false) + .validity(DEFAULT_SELFSIGNED_CERT_VALIDITY); + + return CertificateGenerator.generate(certRequest); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeLockFileException.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeLockFileException.java new file mode 100644 index 000000000000..0433b8bfda99 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/DatanodeLockFileException.java @@ -0,0 +1,27 @@ + /* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +public class DatanodeLockFileException extends RuntimeException { + public DatanodeLockFileException(String message, Exception cause) { + super(message, cause); + } + + public DatanodeLockFileException(String message) { + super(message); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/LegacyDatanodeKeystoreProvider.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/LegacyDatanodeKeystoreProvider.java new file mode 100644 index 000000000000..4d1f469c44d0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/LegacyDatanodeKeystoreProvider.java @@ -0,0 +1,153 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.bson.Document; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.DatanodeKeystoreException; +import org.graylog.security.certutil.CertConstants; +import org.graylog.security.certutil.ca.exceptions.KeyStoreStorageException; +import org.graylog2.database.MongoConnection; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.security.encryption.EncryptedValue; +import org.graylog2.security.encryption.EncryptedValueService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.util.Base64; +import java.util.Optional; + +import static com.mongodb.client.model.Filters.eq; +import static org.graylog.security.certutil.CertConstants.PKCS12; + +/** + * Remove in 7.0 release. All legacy mongodb stored keystores for datanodes should be migrated to local files + * or recreated from scratch. + */ +@Deprecated(forRemoval = true) +public class LegacyDatanodeKeystoreProvider { + + private static final Logger LOG = LoggerFactory.getLogger(LegacyDatanodeKeystoreProvider.class); + public static final String LEGACY_COLLECTION_NAME = "data_node_certificates"; + public static final String ENCRYPTED_CERTIFICATE_FIELD = "encrypted_certificate_keystore"; + public static final String LEGACY_KEY_ALIAS = "datanode"; + + private final NodeId nodeId; + private final String passwordSecret; + + private final DatanodeDirectories datanodeDirectories; + + + private static final String ENCRYPTED_VALUE_SUBFIELD = "encrypted_value"; + private static final String SALT_SUBFIELD = "salt"; + + private final MongoDatabase mongoDatabase; + private final EncryptedValueService encryptionService; + + @Inject + public LegacyDatanodeKeystoreProvider(NodeId nodeId, final @Named("password_secret") String passwordSecret, DatanodeConfiguration datanodeConfiguration, final MongoConnection mongoConnection, EncryptedValueService encryptionService) { + this.nodeId = nodeId; + this.passwordSecret = passwordSecret; + this.datanodeDirectories = datanodeConfiguration.datanodeDirectories(); + this.mongoDatabase = mongoConnection.getMongoDatabase(); + this.encryptionService = encryptionService; + } + + public Optional get() throws KeyStoreStorageException { + return loadKeystore().filter(this::isValidKeyAndCert); + } + + private boolean isValidKeyAndCert(KeyStore keystore) { + try { + return hasPrivateKey(keystore) && DatanodeKeystore.isSignedCertificateChain(keystore); + } catch (KeyStoreException | NoSuchAlgorithmException | UnrecoverableKeyException | + DatanodeKeystoreException e) { + LOG.warn("Failed to obtain legacy keystore, ignoring it", e); + return false; + } + } + + private boolean hasPrivateKey(KeyStore keystore) 
throws KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException { + return keystore.getKey(LEGACY_KEY_ALIAS, passwordSecret.toCharArray()) != null; + } + + private Optional loadKeystore() throws KeyStoreStorageException { + final Optional keystoreAsString = readEncodedCertFromDatabase(); + if (keystoreAsString.isPresent()) { + try (ByteArrayInputStream bais = new ByteArrayInputStream(Base64.getDecoder().decode(keystoreAsString.get()))) { + KeyStore keyStore = KeyStore.getInstance(PKCS12); + keyStore.load(bais, passwordSecret.toCharArray()); + return Optional.of(keyStore); + } catch (Exception ex) { + throw new KeyStoreStorageException("Failed to load keystore from Mongo collection for node " + nodeId.getNodeId(), ex); + } + } + return Optional.empty(); + } + + private Optional readEncodedCertFromDatabase() { + MongoCollection dbCollection = mongoDatabase.getCollection(LEGACY_COLLECTION_NAME); + final FindIterable objects = dbCollection.find( + eq( + "node_id", + nodeId.getNodeId() + ) + ); + final Document nodeCertificate = objects.first(); + + if (nodeCertificate != null) { + final Document encryptedCertificateDocument = nodeCertificate.get(ENCRYPTED_CERTIFICATE_FIELD, Document.class); + if (encryptedCertificateDocument != null) { + final EncryptedValue encryptedCertificate = EncryptedValue.builder() + .value(encryptedCertificateDocument.getString(ENCRYPTED_VALUE_SUBFIELD)) + .salt(encryptedCertificateDocument.getString(SALT_SUBFIELD)) + .isDeleteValue(false) + .isKeepValue(false) + .build(); + + return Optional.ofNullable(encryptionService.decrypt(encryptedCertificate)); + } + } + return Optional.empty(); + } + + public void deleteLocalPrivateKey() { + final Path localPrivateKey = datanodeDirectories.getConfigurationTargetDir().resolve("privateKey.cert"); + if (Files.exists(localPrivateKey)) { + try { + Files.delete(localPrivateKey); + } catch (IOException e) { + LOG.warn("Failed to delete legacy datanode private key", e); + } + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpenSearchPreconditionsCheck.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpenSearchPreconditionsCheck.java new file mode 100644 index 000000000000..0fecf1ec0cec --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpenSearchPreconditionsCheck.java @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.sun.jna.Platform; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import oshi.PlatformEnum; +import oshi.util.FileUtil; + +import static org.graylog2.shared.utilities.StringUtils.f; + +/** + * Check required system parameters. 
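+ * + * Currently this covers the vm.max_map_count kernel limit that OpenSearch expects on Linux (at least 262144, see the + * documentation link referenced below). If this check fails, the limit can usually be raised with something like + * {@code sysctl -w vm.max_map_count=262144}, or persistently via an entry in /etc/sysctl.conf.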
+ */ +public class OpenSearchPreconditionsCheck implements PreflightCheck { + private static final Logger LOG = LoggerFactory.getLogger(OpenSearchPreconditionsCheck.class); + + // See: https://opensearch.org/docs/2.11/install-and-configure/install-opensearch/index/#important-settings + private static final long MAX_MAP_COUNT_MIN = 262144L; + private static final String PROC_SYS_VM_MAX_MAP_COUNT_PATH = "/proc/sys/vm/max_map_count"; + + @Override + public void runCheck() throws PreflightCheckException { + if (PlatformEnum.getValue(Platform.getOSType()) != PlatformEnum.LINUX) { + LOG.debug("Check only supports Linux operating systems"); + return; + } + + final var vmMaxMapCount = FileUtil.getLongFromFile(PROC_SYS_VM_MAX_MAP_COUNT_PATH); + + if (vmMaxMapCount == 0) { + LOG.warn("Couldn't read value from {}", PROC_SYS_VM_MAX_MAP_COUNT_PATH); + } else if (vmMaxMapCount < MAX_MAP_COUNT_MIN) { + throw new PreflightCheckException(f("%s value should be at least %d but is %d (set via \"vm.max_map_count\" sysctl)", + PROC_SYS_VM_MAX_MAP_COUNT_PATH, MAX_MAP_COUNT_MIN, vmMaxMapCount)); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpensearchBinPreflightCheck.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpensearchBinPreflightCheck.java new file mode 100644 index 000000000000..5cfd201cd2ea --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpensearchBinPreflightCheck.java @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bootstrap.preflight; + +import org.graylog.datanode.OpensearchDistribution; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Optional; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +public class OpensearchBinPreflightCheck implements PreflightCheck { + + private final Supplier opensearchDistributionSupplier; + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchBinPreflightCheck.class); + + @Inject + public OpensearchBinPreflightCheck(DatanodeConfiguration datanodeConfiguration) { + this(datanodeConfiguration.opensearchDistributionProvider()::get); + } + + OpensearchBinPreflightCheck(Supplier distribution) { + this.opensearchDistributionSupplier = distribution; + } + + @Override + public void runCheck() throws PreflightCheckException { + final OpensearchDistribution distribution = opensearchDistributionSupplier.get(); + final Path opensearchDir = distribution.directory(); + + if (!Files.isDirectory(opensearchDir)) { + throw new PreflightCheckException("Opensearch base directory " + opensearchDir + " doesn't exist!"); + } + + final Path binPath = distribution.getOpensearchExecutable(); + + if (!Files.exists(binPath)) { + throw new PreflightCheckException("Opensearch binary " + binPath + " doesn't exist!"); + } + + if (!Files.isExecutable(binPath)) { + final String permissions = getPermissions(binPath) + .map(p -> " Permissions of the binary are: " + p) + .orElse(""); + throw new PreflightCheckException("Opensearch binary " + binPath + " is not executable!" + permissions); + } + } + + private static Optional getPermissions(Path binPath) { + try { + return Optional.of(Files.getPosixFilePermissions(binPath)) + .map(perms -> perms.stream().map(Enum::toString).collect(Collectors.joining(","))); + } catch (IOException e) { + LOG.warn("Failed to obtain opensearch binary permissions: " + e.getMessage()); + return Optional.empty(); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpensearchDataDirCompatibilityCheck.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpensearchDataDirCompatibilityCheck.java new file mode 100644 index 000000000000..2445cc2ffef6 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/OpensearchDataDirCompatibilityCheck.java @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bootstrap.preflight; + +import com.github.joschi.jadconfig.ValidationException; +import jakarta.inject.Inject; +import org.graylog.datanode.DirectoryReadableValidator; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.filesystem.index.IncompatibleIndexVersionException; +import org.graylog.datanode.filesystem.index.IndicesDirectoryParser; +import org.graylog.datanode.filesystem.index.dto.IndexerDirectoryInformation; +import org.graylog.datanode.filesystem.index.dto.NodeInformation; +import org.graylog.shaded.opensearch2.org.opensearch.Version; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.file.Path; +import java.util.Locale; + +public class OpensearchDataDirCompatibilityCheck implements PreflightCheck { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchDataDirCompatibilityCheck.class); + + private final DatanodeConfiguration datanodeConfiguration; + private final IndicesDirectoryParser indicesDirectoryParser; + private final DirectoryReadableValidator directoryReadableValidator = new DirectoryReadableValidator(); + + + @Inject + public OpensearchDataDirCompatibilityCheck(DatanodeConfiguration datanodeConfiguration, IndicesDirectoryParser indicesDirectoryParser) { + this.datanodeConfiguration = datanodeConfiguration; + this.indicesDirectoryParser = indicesDirectoryParser; + } + + @Override + public void runCheck() throws PreflightCheckException { + + final Path opensearchDataDir = datanodeConfiguration.datanodeDirectories().getDataTargetDir(); + final String opensearchVersion = datanodeConfiguration.opensearchDistributionProvider().get().version(); + + try { + directoryReadableValidator.validate(opensearchDataDir.toUri().toString(), opensearchDataDir); + final IndexerDirectoryInformation info = indicesDirectoryParser.parse(opensearchDataDir); + checkCompatibility(opensearchVersion, info); + final int indicesCount = info.nodes().stream().mapToInt(n -> n.indices().size()).sum(); + LOG.info("Found {} indices and all of them are valid with current opensearch version {}", indicesCount, opensearchVersion); + } catch (IncompatibleIndexVersionException e) { + throw new PreflightCheckException("Index directory is not compatible with current version " + opensearchVersion + " of Opensearch, terminating.", e); + } catch (ValidationException e) { + throw new PreflightCheckException(e); + } + } + + private void checkCompatibility(String opensearchVersion, IndexerDirectoryInformation info) { + final Version currentVersion = Version.fromString(opensearchVersion); + for (NodeInformation node : info.nodes()) { + final Version nodeVersion = Version.fromString(node.nodeVersion()); + if (node.nodeVersion() != null && !currentVersion.isCompatible(nodeVersion)) { + final String error = String.format(Locale.ROOT, "Current version %s of Opensearch is not compatible with index version %s", currentVersion, nodeVersion); + throw new IncompatibleIndexVersionException(error); + } + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/PreflightClusterConfigurationModule.java b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/PreflightClusterConfigurationModule.java new file mode 100644 index 000000000000..c1f55b006f1c --- /dev/null +++ 
b/data-node/src/main/java/org/graylog/datanode/bootstrap/preflight/PreflightClusterConfigurationModule.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.google.inject.AbstractModule; +import org.graylog2.cluster.ClusterConfigServiceImpl; +import org.graylog2.plugin.cluster.ClusterConfigService; +import org.graylog2.shared.plugins.ChainingClassLoader; + +public class PreflightClusterConfigurationModule extends AbstractModule { + private final ChainingClassLoader chainingClassLoader; + + public PreflightClusterConfigurationModule(ChainingClassLoader chainingClassLoader) { + this.chainingClassLoader = chainingClassLoader; + } + + @Override + protected void configure() { + bind(ChainingClassLoader.class).toInstance(chainingClassLoader); + bind(ClusterConfigService.class).to(ClusterConfigServiceImpl.class).asEagerSingleton(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/commands/Datanode.java b/data-node/src/main/java/org/graylog/datanode/commands/Datanode.java new file mode 100644 index 000000000000..283e738b4731 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/commands/Datanode.java @@ -0,0 +1,143 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.commands; + +import com.github.rvesse.airline.annotations.Command; +import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.ServiceManager; +import com.google.inject.Injector; +import com.google.inject.Key; +import com.google.inject.Module; +import com.google.inject.spi.Message; +import com.mongodb.MongoException; +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.bindings.ConfigurationModule; +import org.graylog.datanode.bindings.DatanodeServerBindings; +import org.graylog.datanode.bindings.PeriodicalBindings; +import org.graylog.datanode.bootstrap.DatanodeBootstrap; +import org.graylog.datanode.bootstrap.Main; +import org.graylog.datanode.configuration.DatanodeProvisioningBindings; +import org.graylog.datanode.configuration.S3RepositoryConfiguration; +import org.graylog.datanode.rest.RestBindings; +import org.graylog.datanode.shutdown.GracefulShutdown; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.DataNodeStatus; +import org.graylog2.cluster.nodes.NodeService; +import org.graylog2.configuration.TLSProtocolsConfiguration; +import org.graylog2.featureflag.FeatureFlags; +import org.graylog2.plugin.Tools; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.shared.UI; +import org.graylog2.shared.system.activities.Activity; +import org.graylog2.shared.system.activities.ActivityWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + + +@Command(name = "datanode", description = "Start Graylog Data Node") +public class Datanode extends DatanodeBootstrap { + private static final Logger LOG = LoggerFactory.getLogger(Datanode.class); + + private final S3RepositoryConfiguration s3RepositoryConfiguration = new S3RepositoryConfiguration(); + private final TLSProtocolsConfiguration tlsConfiguration = new TLSProtocolsConfiguration(); + + public Datanode() { + super("datanode", new Configuration()); + } + + @Override + protected @Nonnull List getNodeCommandBindings(FeatureFlags featureFlags) { + final ImmutableList.Builder modules = ImmutableList.builder(); + modules.add( + new ConfigurationModule(configuration), + new DatanodeServerBindings(), + new RestBindings(), + new DatanodeProvisioningBindings(), + new PeriodicalBindings() + ); + return modules.build(); + } + + @Override + public @Nonnull List getNodeCommandConfigurationBeans() { + return Arrays.asList(configuration, + tlsConfiguration, + s3RepositoryConfiguration); + } + + @Override + protected Class shutdownHook() { + return ShutdownHook.class; + } + + private static class ShutdownHook implements Runnable { + private final ActivityWriter activityWriter; + private final ServiceManager serviceManager; + private final GracefulShutdown gracefulShutdown; + + @Inject + public ShutdownHook(ActivityWriter activityWriter, + ServiceManager serviceManager, + GracefulShutdown gracefulShutdown) { + this.activityWriter = activityWriter; + this.serviceManager = serviceManager; + this.gracefulShutdown = gracefulShutdown; + } + + @Override + public void run() { + String msg = "SIGNAL received. 
Shutting down."; + LOG.info(msg); + activityWriter.write(new Activity(msg, Main.class)); + + gracefulShutdown.runWithoutExit(); + serviceManager.stopAsync().awaitStopped(); + } + } + + @Override + protected void startNodeRegistration(Injector injector) { + final NodeService nodeService = injector.getInstance(new Key<>() {}); + final NodeId nodeId = injector.getInstance(NodeId.class); + // always set leader to "false" on startup and let the NodePingPeriodical take care of it later + nodeService.registerServer(DataNodeDto.Builder.builder() + .setId(nodeId.getNodeId()) + .setTransportAddress(configuration.getHttpPublishUri().toString()) + .setHostname(Tools.getLocalCanonicalHostname()) + .setDataNodeStatus(DataNodeStatus.STARTING) + .build()); + } + + @Override + protected void annotateInjectorExceptions(Collection messages) { + super.annotateInjectorExceptions(messages); + for (Message message : messages) { + if (message.getCause() instanceof MongoException) { + MongoException e = (MongoException) message.getCause(); + LOG.error(UI.wallString("Unable to connect to MongoDB. Is it running and the configuration correct?\n" + + "Details: " + e.getMessage())); + System.exit(-1); + } + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/commands/DatanodeCommandsProvider.java b/data-node/src/main/java/org/graylog/datanode/commands/DatanodeCommandsProvider.java new file mode 100644 index 000000000000..17a8b77737e2 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/commands/DatanodeCommandsProvider.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.commands; + +import com.github.rvesse.airline.builder.CliBuilder; +import org.graylog2.bootstrap.CliCommand; +import org.graylog2.bootstrap.CliCommandsProvider; + +public class DatanodeCommandsProvider implements CliCommandsProvider { + @Override + public void addTopLevelCommandsOrGroups(CliBuilder builder) { + builder.withCommand(Datanode.class); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/ConfigDirRemovalThread.java b/data-node/src/main/java/org/graylog/datanode/configuration/ConfigDirRemovalThread.java new file mode 100644 index 000000000000..67f2f4b3210b --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/ConfigDirRemovalThread.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.stream.Stream; + +public class ConfigDirRemovalThread extends Thread { + private final Path opensearchConfigDir; + + public ConfigDirRemovalThread(Path opensearchConfigDir) { + this.opensearchConfigDir = opensearchConfigDir; + } + + @Override + public void run() { + deleteDirectory(opensearchConfigDir); + } + private void deleteDirectory(Path toBeDeleted) { + try { + if (Files.isDirectory(toBeDeleted)) { + try (final Stream list = Files.list(toBeDeleted)) { + list.forEach(this::deleteDirectory); + } + } + Files.delete(toBeDeleted); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeConfiguration.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeConfiguration.java new file mode 100644 index 000000000000..3b78df4747ea --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeConfiguration.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import org.graylog2.security.IndexerJwtAuthTokenProvider; + +/** + * DatanodeConfiguration holds the static configuration as parsed during the datanode startup, either from the + * config file or from the ENV properties. + */ +public record DatanodeConfiguration( + OpensearchDistributionProvider opensearchDistributionProvider, + DatanodeDirectories datanodeDirectories, + int processLogsBufferSize, + IndexerJwtAuthTokenProvider indexerJwtAuthTokenProvider +) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeConfigurationProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeConfigurationProvider.java new file mode 100644 index 000000000000..3cfc53f0d411 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeConfigurationProvider.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration; + +import org.graylog.datanode.Configuration; +import org.graylog2.plugin.Tools; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.security.IndexerJwtAuthTokenProvider; + +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; + +@Singleton +public class DatanodeConfigurationProvider implements Provider { + + private final DatanodeConfiguration datanodeConfiguration; + + @Inject + public DatanodeConfigurationProvider( + final Configuration localConfiguration, + IndexerJwtAuthTokenProvider jwtTokenProvider, + OpensearchDistributionProvider opensearchDistributionProvider, + NodeId nodeId) { + datanodeConfiguration = new DatanodeConfiguration( + opensearchDistributionProvider, + DatanodeDirectories.fromConfiguration(localConfiguration, nodeId), + localConfiguration.getProcessLogsBufferSize(), + jwtTokenProvider + ); + } + + @Override + public DatanodeConfiguration get() { + return datanodeConfiguration; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeDirectories.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeDirectories.java new file mode 100644 index 000000000000..d652f904920f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeDirectories.java @@ -0,0 +1,174 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import jakarta.annotation.Nonnull; +import org.graylog.datanode.Configuration; +import org.graylog2.plugin.system.NodeId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; + +/** + * This is a collection of pointers to directories used to store data, logs and configuration of the managed opensearch. + */ +public class DatanodeDirectories { + + private static final Logger LOG = LoggerFactory.getLogger(DatanodeDirectories.class); + + /** + * The execute bit for directories means that owner can traverse these and access their content. 
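+ * Group and other users get no permissions at all, so directories created with these attributes are effectively mode 0700 (owner-only).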
+ */ + protected static final FileAttribute> DIRECTORY_PERMISSIONS = PosixFilePermissions.asFileAttribute(Set.of(PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_EXECUTE)); + + private final Path dataTargetDir; + private final Path logsTargetDir; + private final Path configurationSourceDir; + private final Path configurationTargetDir; + + public DatanodeDirectories(Path dataTargetDir, Path logsTargetDir, @Nullable Path configurationSourceDir, Path configurationTargetDir) { + this.dataTargetDir = dataTargetDir; + this.logsTargetDir = logsTargetDir; + this.configurationSourceDir = configurationSourceDir; + this.configurationTargetDir = configurationTargetDir; + } + + public static DatanodeDirectories fromConfiguration(Configuration configuration, NodeId nodeId) { + final DatanodeDirectories directories = new DatanodeDirectories( + backwardsCompatible(configuration.getOpensearchDataLocation(), nodeId, "opensearch_data_location"), + backwardsCompatible(configuration.getOpensearchLogsLocation(), nodeId, "opensearch_logs_location"), + configuration.getDatanodeConfigurationLocation(), + backwardsCompatible(configuration.getOpensearchConfigLocation(), nodeId, "opensearch_config_location") + ); + + LOG.info("Opensearch of the node {} uses following directories as its storage: {}", nodeId.getNodeId(), directories); + return directories; + } + + /** + * Originally we created a subdir named by the node ID for each of the data/config/logs directories and automatically + * used that subdir. Later we discovered that this won't allow us to run rolling upgrades for opensearch, as we + * are unable to point the configuration to an exact directory. This method works as a backwards compatible + * fallback, detecting the presence of the node ID subdir and using it, if available. It also logs a warning with + * configuration change suggestion. + * TODO: Remove in 6.0 release + */ + @Deprecated(forRemoval = true) + @Nonnull + protected static Path backwardsCompatible(@Nonnull Path path, NodeId nodeId, String configProperty) { + final Path nodeIdSubdir = path.resolve(nodeId.getNodeId()); + if(Files.exists(nodeIdSubdir) && Files.isDirectory(nodeIdSubdir)) { + LOG.warn("Caution, this datanode instance uses old format of directories. Please configure {} to point directly to {}", configProperty, nodeIdSubdir.toAbsolutePath()); + return nodeIdSubdir; + } + return path; + } + + /** + * This directory is used by the managed opensearch to store its data in it. + * Read-write permissions required. + */ + public Path getDataTargetDir() { + return dataTargetDir.toAbsolutePath(); + } + + /** + * This directory is used by the managed opensearch to store its logs in it. + * Read-write permissions required. + */ + public Path getLogsTargetDir() { + return logsTargetDir.toAbsolutePath(); + } + + /** + * This directory is provided by system admin to the datanode. We read our configuration from this location, + * we read certificates from here. We'll never write anything to it. + * Read-only permissions required. 
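+ * + * Relative file names (for example a certificate configured simply as {@code ca.pem}) can be resolved against this + * directory via {@link #resolveConfigurationSourceFile(String)}; that method returns absolute paths unchanged.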
+ */ + public Optional getConfigurationSourceDir() { + return Optional.ofNullable(configurationSourceDir).map(Path::toAbsolutePath); + } + + public Optional resolveConfigurationSourceFile(String filename) { + final Path filePath = Path.of(filename); + if (filePath.isAbsolute()) { + return Optional.of(filePath); + } else { + return getConfigurationSourceDir().map(dir -> dir.resolve(filename)); + } + } + + /** + * This directory is used by us to store all runtime-generated configuration of datanode. This + * could be truststores, private keys, certificates and other generated config files. + * We also synchronize and generate opensearch configuration into a subdir of this dir, see {@link #createUniqueOpensearchProcessConfigurationDir()} + * Read-write permissions required. + */ + public Path getConfigurationTargetDir() { + return configurationTargetDir.toAbsolutePath(); + } + + @Nonnull + static Path createRestrictedAccessFile(Path resolvedPath) throws IOException { + Files.deleteIfExists(resolvedPath); + final Set permissions = Set.of(PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_READ); + final FileAttribute> fileAttributes = PosixFilePermissions.asFileAttribute(permissions); + return Files.createFile(resolvedPath, fileAttributes); + } + + + /** + * This is a subdirectory of {@link #getConfigurationTargetDir()}. It's used by us to synchronize and generate opensearch + * configuration. Opensearch is then instructed to accept this dir as its base configuration dir (OPENSEARCH_PATH_CONF env property). + * Opensearch configuration is always regenerated during runtime, so the target dir may be temp and deleted when + * the JVM terminates. This prevents concurrency collisions, outdated files, need to remove existing but not needed. + */ + public OpensearchConfigurationDir createUniqueOpensearchProcessConfigurationDir() { + final Path configRootDir = getConfigurationTargetDir(); + try { + final Path opensearchConfigDir = Files.createTempDirectory(configRootDir, "opensearch", DIRECTORY_PERMISSIONS); + // the process configuration dir can be safely removed when this JVM terminates. It will be generated + // again next time we'll start a process. + Runtime.getRuntime().addShutdownHook(new ConfigDirRemovalThread(opensearchConfigDir)); + return new OpensearchConfigurationDir(opensearchConfigDir); + } catch (IOException e) { + throw new OpensearchConfigurationException("Failed to create opensearch configuration directory", e); + } + } + + + + @Override + public String toString() { + return "DatanodeDirectories{" + + "dataTargetDir='" + getDataTargetDir() + '\'' + + ", logsTargetDir='" + getLogsTargetDir() + '\'' + + ", configurationSourceDir='" + getConfigurationSourceDir() + '\'' + + ", configurationTargetDir='" + getConfigurationTargetDir() + '\'' + + '}'; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystore.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystore.java new file mode 100644 index 000000000000..54097c4df35f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystore.java @@ -0,0 +1,203 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import com.google.common.eventbus.EventBus; +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.apache.commons.lang3.RandomStringUtils; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.graylog.security.certutil.CertConstants; +import org.graylog.security.certutil.KeyPair; +import org.graylog.security.certutil.cert.CertificateChain; +import org.graylog.security.certutil.csr.CsrGenerator; +import org.graylog.security.certutil.csr.InMemoryKeystoreInformation; +import org.graylog.security.certutil.csr.exceptions.CSRGenerationException; +import org.graylog.security.certutil.keystore.storage.KeystoreUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.Key; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.NoSuchProviderException; +import java.security.Security; +import java.security.SignatureException; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertPath; +import java.security.cert.CertPathValidator; +import java.security.cert.CertPathValidatorException; +import java.security.cert.CertPathValidatorResult; +import java.security.cert.Certificate; +import java.security.cert.CertificateException; +import java.security.cert.CertificateFactory; +import java.security.cert.PKIXParameters; +import java.security.cert.X509Certificate; +import java.util.Arrays; +import java.util.Date; +import java.util.List; + +import static org.graylog.security.certutil.CertConstants.PKCS12; + +public class DatanodeKeystore { + + static { + Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); + } + + + private static final Logger LOG = LoggerFactory.getLogger(DatanodeKeystore.class); + private final DatanodeDirectories datanodeDirectories; + private final String passwordSecret; + + public static final Path DATANODE_KEYSTORE_FILE = Path.of("keystore.jks"); + public static String DATANODE_KEY_ALIAS = "datanode"; + private final EventBus eventBus; + + @Inject + public DatanodeKeystore(DatanodeConfiguration configuration, final @Named("password_secret") String passwordSecret, EventBus eventBus) { + this(configuration.datanodeDirectories(), passwordSecret, eventBus); + } + + public DatanodeKeystore(DatanodeDirectories datanodeDirectories, String passwordSecret, EventBus eventBus) { + this.datanodeDirectories = datanodeDirectories; + this.passwordSecret = passwordSecret; + this.eventBus = eventBus; + } + + public synchronized boolean exists() { + return Files.exists(keystorePath()); + } + + public synchronized boolean hasSignedCertificate() throws DatanodeKeystoreException { + return 
!isSelfSignedDatanodeCert(loadKeystore()); + } + + public synchronized static boolean isSignedCertificateChain(KeyStore keystore) throws DatanodeKeystoreException { + return !isSelfSignedDatanodeCert(keystore); + } + + private static boolean isSelfSignedDatanodeCert(KeyStore keystore) throws DatanodeKeystoreException { + try { + final Certificate certificate = keystore.getCertificate(DATANODE_KEY_ALIAS); + if (certificate instanceof X509Certificate nodeCert) { + return nodeCert.getIssuerX500Principal().equals(nodeCert.getSubjectX500Principal()); + } else { + throw new DatanodeKeystoreException("Unsupported type of data node certificate: " + certificate.getClass()); + } + } catch (KeyStoreException e) { + throw new DatanodeKeystoreException("Failed to check if datanode certificate is self-signed.", e); + } + } + + @Nonnull + private Path keystorePath() { + return datanodeDirectories.getConfigurationTargetDir().resolve(DATANODE_KEYSTORE_FILE); + } + + public synchronized KeyStore create(KeyPair keyPair) throws DatanodeKeystoreException { + try { + return persistKeystore(keyPair.toKeystore(DATANODE_KEY_ALIAS, passwordSecret.toCharArray())); + } catch (Exception e) { + throw new DatanodeKeystoreException(e); + } + } + + public synchronized KeyStore create(KeyStore keystore) throws DatanodeKeystoreException { + return persistKeystore(keystore); + } + + public synchronized void replaceCertificatesInKeystore(CertificateChain certificateChain) throws DatanodeKeystoreException { + try { + final KeyStore keystore = loadKeystore(); + Key privateKey = keystore.getKey(DATANODE_KEY_ALIAS, passwordSecret.toCharArray()); + // replace the existing self-signed certificates chain with the signed chain from the event + keystore.setKeyEntry(DATANODE_KEY_ALIAS, privateKey, passwordSecret.toCharArray(), certificateChain.toCertificateChainArray()); + persistKeystore(keystore); + LOG.info("Persisting signed certificates to the datanode keystore finished"); + } catch (KeyStoreException | NoSuchAlgorithmException | UnrecoverableKeyException e) { + throw new DatanodeKeystoreException(e); + } + } + + private synchronized KeyStore persistKeystore(KeyStore keystore) throws DatanodeKeystoreException { + try (FileOutputStream fos = new FileOutputStream(keystorePath().toFile())) { + keystore.store(fos, passwordSecret.toCharArray()); + } catch (IOException | CertificateException | KeyStoreException | NoSuchAlgorithmException e) { + throw new DatanodeKeystoreException(e); + } + triggerChangeEvent(keystore); + return keystore; + } + + private void triggerChangeEvent(KeyStore keystore) throws DatanodeKeystoreException { + if (isSignedCertificateChain(keystore)) { + eventBus.post(new DatanodeKeystoreChangedEvent()); + } + } + + public synchronized KeyStore loadKeystore() throws DatanodeKeystoreException { + try (FileInputStream fis = new FileInputStream(keystorePath().toFile())) { + KeyStore keystore = KeyStore.getInstance(PKCS12); + keystore.load(fis, passwordSecret.toCharArray()); + return keystore; + } catch (IOException | CertificateException | KeyStoreException | NoSuchAlgorithmException e) { + throw new DatanodeKeystoreException(e); + } + } + + public synchronized InMemoryKeystoreInformation getSafeCopy() throws DatanodeKeystoreException { + final char[] randomKeystorePassword = RandomStringUtils.randomAlphabetic(256).toCharArray(); + try { + final KeyStore reencrypted = KeystoreUtils.newStoreCopyContent(loadKeystore(), passwordSecret.toCharArray(), randomKeystorePassword); + return new 
InMemoryKeystoreInformation(reencrypted, randomKeystorePassword); + } catch (GeneralSecurityException | IOException e) { + throw new DatanodeKeystoreException(e); + } + } + + public synchronized PKCS10CertificationRequest createCertificateSigningRequest(String hostname, List<String> altNames) throws DatanodeKeystoreException, CSRGenerationException { + final InMemoryKeystoreInformation keystore = new InMemoryKeystoreInformation(loadKeystore(), passwordSecret.toCharArray()); + return CsrGenerator.generateCSR(keystore, DATANODE_KEY_ALIAS, hostname, altNames); + } + + @Nullable + public synchronized Date getCertificateExpiration() { + try { + final KeyStore keystore = loadKeystore(); + if (isSignedCertificateChain(keystore)) { + final X509Certificate datanodeCert = (X509Certificate) keystore.getCertificate(DATANODE_KEY_ALIAS); + return datanodeCert.getNotAfter(); + } else { + return null; + } + } catch (KeyStoreException | DatanodeKeystoreException e) { + throw new RuntimeException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystoreChangedEvent.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystoreChangedEvent.java new file mode 100644 index 000000000000..8d037e7677c6 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystoreChangedEvent.java @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +/** + * This event will be triggered every time a datanode private key or certificate changes. Other parts of the system + * that use the keystore (the OpenSearch process, Jersey) should react to this event and refresh their security setups. + */ +public class DatanodeKeystoreChangedEvent { +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystoreException.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystoreException.java new file mode 100644 index 000000000000..76497f40cb19 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeKeystoreException.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * .
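A minimal sketch (not part of this changeset) of how a component could react to DatanodeKeystoreChangedEvent; the subscriber class and its refresh logic are hypothetical, only the Guava EventBus wiring mirrors the providers in this PR:

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import org.graylog.datanode.configuration.DatanodeKeystoreChangedEvent;

class KeystoreAwareComponent {
    KeystoreAwareComponent(EventBus eventBus) {
        eventBus.register(this); // same registration pattern as DatanodeTrustManagerProvider below
    }

    @Subscribe
    public void onKeystoreChange(DatanodeKeystoreChangedEvent event) {
        // hypothetical: reload TLS material / restart dependent services here
    }
}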
+ */ +package org.graylog.datanode.configuration; + +import java.security.KeyStoreException; + +public class DatanodeKeystoreException extends Exception { + public DatanodeKeystoreException(Throwable cause) { + super(cause); + } + + public DatanodeKeystoreException(String cause) { + super(cause); + } + + public DatanodeKeystoreException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeProvisioningBindings.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeProvisioningBindings.java new file mode 100644 index 000000000000..385bb432afdc --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeProvisioningBindings.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import com.google.inject.AbstractModule; +import org.graylog2.bootstrap.preflight.GraylogCertificateProvisioner; +import org.graylog2.bootstrap.preflight.GraylogCertificateProvisionerImpl; +import org.graylog2.cluster.certificates.CertificateExchange; +import org.graylog2.cluster.certificates.CertificateExchangeImpl; + +public class DatanodeProvisioningBindings extends AbstractModule { + + @Override + protected void configure() { + bind(CertificateExchange.class).to(CertificateExchangeImpl.class); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeTrustManagerProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeTrustManagerProvider.java new file mode 100644 index 000000000000..56e6d6f1726b --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/DatanodeTrustManagerProvider.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration; + +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; +import org.graylog.datanode.opensearch.OpensearchConfigurationChangeEvent; +import org.graylog2.security.CustomCAX509TrustManager; +import org.graylog2.security.TrustManagerAggregator; + +import javax.net.ssl.X509TrustManager; +import java.security.KeyStore; +import java.util.List; + +@Singleton +public class DatanodeTrustManagerProvider implements Provider { + + private final CustomCAX509TrustManager customCAX509TrustManager; + private volatile KeyStore datanodeTruststore; + + @Inject + public DatanodeTrustManagerProvider(CustomCAX509TrustManager CustomCAX509TrustManager, EventBus eventBus) { + customCAX509TrustManager = CustomCAX509TrustManager; + eventBus.register(this); + } + + @Subscribe + public void onOpensearchConfigurationChange(OpensearchConfigurationChangeEvent e) { + setTruststore(e.config().trustStore()); + } + + private void setTruststore(KeyStore keyStore) { + this.datanodeTruststore = keyStore; + } + + + @Override + public X509TrustManager get() { + final X509TrustManager datanodeTrustManager = TrustManagerAggregator.trustManagerFromKeystore(this.datanodeTruststore); + return new TrustManagerAggregator(List.of(datanodeTrustManager, customCAX509TrustManager)); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchArchitecture.java b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchArchitecture.java new file mode 100644 index 000000000000..cfede539351e --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchArchitecture.java @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +public enum OpensearchArchitecture { + x64, + aarch64; + + public static OpensearchArchitecture fromOperatingSystem() { + final var osArch = System.getProperty("os.arch"); + return fromCode(osArch); + } + + public static OpensearchArchitecture fromCode(String osArch) { + return switch (osArch) { + case "amd64" -> x64; + case "x86_64" -> x64; + case "x64" -> x64; + case "aarch64" -> aarch64; + case "arm64" -> aarch64; + default -> + throw new UnsupportedOperationException("Unsupported OpenSearch distribution architecture: " + osArch); + }; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationDir.java b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationDir.java new file mode 100644 index 000000000000..14754c37d252 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationDir.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
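A small illustrative sketch (not part of this changeset) of the os.arch mapping performed by OpensearchArchitecture.fromCode above:

import org.graylog.datanode.configuration.OpensearchArchitecture;

class ArchitectureMappingExample {
    public static void main(String[] args) {
        // "amd64", "x86_64" and "x64" all map to x64; "aarch64" and "arm64" map to aarch64
        System.out.println(OpensearchArchitecture.fromCode("amd64")); // x64
        System.out.println(OpensearchArchitecture.fromCode("arm64")); // aarch64
        // any other value, e.g. "ppc64le", throws an UnsupportedOperationException
    }
}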
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +public record OpensearchConfigurationDir(java.nio.file.Path configurationRoot) { + public Path createOpensearchProcessConfigurationFile(Path relativePath) throws IOException { + if (relativePath.isAbsolute()) { + throw new IllegalArgumentException("Only relative paths supported here!" + relativePath); + } + + final Path resolvedPath = configurationRoot.resolve(relativePath); + + // recursively create all parent directories + Files.createDirectories(resolvedPath.getParent(), DatanodeDirectories.DIRECTORY_PERMISSIONS); + return DatanodeDirectories.createRestrictedAccessFile(resolvedPath); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationException.java b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationException.java new file mode 100644 index 000000000000..9fb14dbfceec --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationException.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +public class OpensearchConfigurationException extends RuntimeException { + public OpensearchConfigurationException(String message) { + super(message); + } + + public OpensearchConfigurationException(Exception cause) { + super(cause); + } + + public OpensearchConfigurationException(String message, Exception cause) { + super(message, cause); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationService.java b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationService.java new file mode 100644 index 000000000000..529f6df09667 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchConfigurationService.java @@ -0,0 +1,124 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import com.google.common.util.concurrent.AbstractIdleService; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.opensearch.OpensearchConfigurationChangeEvent; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; + +import java.security.cert.X509Certificate; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +@Singleton +public class OpensearchConfigurationService extends AbstractIdleService { + private final Configuration localConfiguration; + private final DatanodeConfiguration datanodeConfiguration; + private final Set> opensearchConfigurationBeans; + + /** + * This configuration won't survive datanode restart. But it can be repeatedly provided to the managed opensearch + */ + private final Map transientConfiguration = new ConcurrentHashMap<>(); + + private final List trustedCertificates = new ArrayList<>(); + private final EventBus eventBus; + + @Inject + public OpensearchConfigurationService(final Configuration localConfiguration, + final DatanodeConfiguration datanodeConfiguration, + final Set> opensearchConfigurationBeans, + final EventBus eventBus) { + this.localConfiguration = localConfiguration; + this.datanodeConfiguration = datanodeConfiguration; + this.opensearchConfigurationBeans = opensearchConfigurationBeans; + this.eventBus = eventBus; + eventBus.register(this); + } + + @Override + protected void startUp() { + triggerConfigurationChangedEvent(); + } + + @Override + protected void shutDown() { + + } + + @Subscribe + public void onKeystoreChange(DatanodeKeystoreChangedEvent event) { + // configuration relies on the keystore. 
Every change there should rebuild the configuration and restart + // dependent services + triggerConfigurationChangedEvent(); + } + + + public void setAllowlist(List allowlist, List trustedCertificates) { + this.trustedCertificates.addAll(trustedCertificates); + setTransientConfiguration("reindex.remote.allowlist", String.join(", ", allowlist)); + } + + public void removeAllowlist() { + removeTransientConfiguration("reindex.remote.allowlist"); + } + + public void setTransientConfiguration(String key, String value) { + this.transientConfiguration.put(key, value); + triggerConfigurationChangedEvent(); + } + + public void removeTransientConfiguration(String key) { + final Object removedValue = this.transientConfiguration.remove(key); + if (removedValue != null) { + triggerConfigurationChangedEvent(); + } + } + + private OpensearchConfiguration get() { + + final List configurationParts = opensearchConfigurationBeans.stream() + .map(bean -> bean.buildConfigurationPart(new OpensearchConfigurationParams(trustedCertificates, transientConfiguration))) + .collect(Collectors.toList()); + + return new OpensearchConfiguration( + datanodeConfiguration.opensearchDistributionProvider().get(), + datanodeConfiguration.datanodeDirectories(), + localConfiguration.getHostname(), + localConfiguration.getOpensearchHttpPort(), + configurationParts + ); + } + + private void triggerConfigurationChangedEvent() { + eventBus.post(new OpensearchConfigurationChangeEvent(get())); + } + + +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchDistributionProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchDistributionProvider.java new file mode 100644 index 000000000000..4926db7e92e0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchDistributionProvider.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
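A hedged sketch (not part of this changeset): any EventBus subscriber receives a freshly rebuilt OpensearchConfiguration whenever the keystore or a transient setting changes; the listener class below is hypothetical.

import com.google.common.eventbus.Subscribe;
import org.graylog.datanode.opensearch.OpensearchConfigurationChangeEvent;

class ConfigurationChangeLogger {
    @Subscribe
    public void onConfigurationChange(OpensearchConfigurationChangeEvent event) {
        // event.config() carries the merged configuration parts, e.g. event.config().trustStore()
        System.out.println("OpenSearch configuration was rebuilt, dependent services should refresh");
    }
}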
+ */ +package org.graylog.datanode.configuration; + +import com.google.common.base.Suppliers; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.OpensearchDistribution; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; +import java.util.function.Supplier; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@Singleton +public class OpensearchDistributionProvider implements Provider { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchDistributionProvider.class); + public static final Pattern FULL_NAME_PATTERN = Pattern.compile("opensearch-(.*)-(.+)-(.+)"); + public static final Pattern SHORT_NAME_PATTERN = Pattern.compile("opensearch-(.*)"); + + private final Supplier distribution; + + @Inject + public OpensearchDistributionProvider(final Configuration localConfiguration) { + this(Path.of(localConfiguration.getOpensearchDistributionRoot()), OpensearchArchitecture.fromOperatingSystem()); + } + + public OpensearchDistributionProvider(final Path opensearchDistributionRoot, OpensearchArchitecture architecture) { + this.distribution = Suppliers.memoize(() -> detectInDirectory(opensearchDistributionRoot, architecture)); + } + + @Override + public OpensearchDistribution get() { + return distribution.get(); + } + + private static OpensearchDistribution detectInDirectory(Path rootDistDirectory, OpensearchArchitecture osArch) { + Objects.requireNonNull(rootDistDirectory, "Dist directory needs to be provided"); + + // if the base directory points directly to one opensearch distribution, we should return it directly. + // If the format doesn't fit, we'll look for opensearch distributions in this root directory. + final Optional distDirectory = parse(rootDistDirectory); + return distDirectory.orElseGet(() -> detectInSubdirectory(rootDistDirectory, osArch)); + + } + + private static OpensearchDistribution detectInSubdirectory(Path directory, OpensearchArchitecture arch) { + final List opensearchDistributions; + try ( + var files = Files.list(directory); + ) { + opensearchDistributions = files + .filter(Files::isDirectory) + .flatMap(f -> parse(f).stream()) + .toList(); + } catch (IOException e) { + throw createErrorMessage(directory, arch, "Failed to list content of provided directory", e); + } + + if (opensearchDistributions.isEmpty()) { + throw createErrorMessage(directory, arch, "Could not detect any opensearch distribution"); + } + + LOG.info("Found following opensearch distributions: " + opensearchDistributions.stream().map(d -> d.directory().toAbsolutePath()).toList()); + + return findByArchitecture(opensearchDistributions, arch) + .orElseGet(() -> findWithoutArchitecture(opensearchDistributions) + .orElseThrow(() -> createErrorMessage(directory, arch, "No Opensearch distribution found for your system architecture"))); + } + + private static IllegalArgumentException createErrorMessage(Path directory, OpensearchArchitecture arch, String message) { + return createErrorMessage(directory, arch, message, null); + } + + + private static IllegalArgumentException createErrorMessage(Path directory, OpensearchArchitecture arch, String errorMessage, Exception cause) { + final String message = String.format(Locale.ROOT, "%s. 
Directory used for Opensearch detection: %s. Please configure opensearch_location to a directory that contains an opensearch distribution for your architecture %s. You can download Opensearch from https://opensearch.org/downloads.html . Please extract the downloaded distribution and point opensearch_location configuration option to that directory.", errorMessage, directory.toAbsolutePath(), arch); + return new IllegalArgumentException(message, cause); + } + + public static String archCode(final String osArch) { + return switch (osArch) { + case "amd64" -> "x64"; + case "x86_64" -> "x64"; + case "aarch64" -> "aarch64"; + case "arm64" -> "aarch64"; + default -> + throw new UnsupportedOperationException("Unsupported OpenSearch distribution architecture: " + osArch); + }; + } + + private static Optional findByArchitecture(List available, OpensearchArchitecture arch) { + return available.stream() + .filter(d -> arch.equals(d.architecture())) + .findAny(); + } + + private static Optional findWithoutArchitecture(List available) { + return available.stream().filter(d -> d.architecture() == null).findFirst(); + } + + private static Optional parse(Path path) { + final String filename = path.getFileName().toString(); + final Matcher matcher = FULL_NAME_PATTERN.matcher(filename); + if (matcher.matches()) { + return Optional.of(new OpensearchDistribution(path, matcher.group(1), matcher.group(2), OpensearchArchitecture.fromCode(matcher.group(3)))); + } else { + final Matcher shortMatcher = SHORT_NAME_PATTERN.matcher(filename); + if (shortMatcher.matches()) { + return Optional.of(new OpensearchDistribution(path, shortMatcher.group(1))); + } else { + return Optional.empty(); + } + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchKeystoreProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchKeystoreProvider.java new file mode 100644 index 000000000000..400ff4d86c1b --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/OpensearchKeystoreProvider.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
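A short sketch (not part of this changeset) of the directory names the provider above recognizes; the version string 2.15.0 is only an example.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class DistributionNameExample {
    public static void main(String[] args) {
        Pattern fullName = Pattern.compile("opensearch-(.*)-(.+)-(.+)");
        Matcher m = fullName.matcher("opensearch-2.15.0-linux-x64");
        if (m.matches()) {
            // version=2.15.0, platform=linux, arch=x64 (mapped via OpensearchArchitecture.fromCode)
            System.out.printf("version=%s platform=%s arch=%s%n", m.group(1), m.group(2), m.group(3));
        }
        // a plain "opensearch-2.15.0" directory matches SHORT_NAME_PATTERN and is accepted
        // as a distribution without architecture information
    }
}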
+ */ +package org.graylog.datanode.configuration; + + +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import io.jsonwebtoken.lang.Collections; +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; +import org.graylog.datanode.opensearch.OpensearchConfigurationChangeEvent; +import org.graylog.security.certutil.KeyStoreDto; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +@Singleton +public class OpensearchKeystoreProvider implements Provider> { + + public enum Store {CONFIGURED, TRUSTSTORE, HTTP, TRANSPORT} + + private static final Logger log = LoggerFactory.getLogger(OpensearchKeystoreProvider.class); + + private final Map keystores = new ConcurrentHashMap<>(); + + @Inject + public OpensearchKeystoreProvider(EventBus eventBus) { + eventBus.register(this); + } + + @Subscribe + @SuppressWarnings("unused") + public void onConfigurationChangeEvent(OpensearchConfigurationChangeEvent event) { + try { + keystores.put(Store.TRUSTSTORE, KeyStoreDto.fromKeyStore(event.config().trustStore())); + + event.config().httpCertificate() + .map(OpensearchKeystoreProvider::toDto) + .ifPresentOrElse(dto -> keystores.put(Store.HTTP, dto), () -> keystores.remove(Store.HTTP)); + + event.config().transportCertificate() + .map(OpensearchKeystoreProvider::toDto) + .ifPresentOrElse(dto -> keystores.put(Store.TRANSPORT, dto), () -> keystores.remove(Store.TRANSPORT)); + + } catch (Exception e) { + log.error("Error reading truststore", e); + } + } + + @Nonnull + private static KeyStoreDto toDto(KeystoreInformation cert) { + try { + return KeyStoreDto.fromKeyStore(cert.loadKeystore()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public Map get() { + return Collections.immutable(keystores); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/S3RepositoryConfiguration.java b/data-node/src/main/java/org/graylog/datanode/configuration/S3RepositoryConfiguration.java new file mode 100644 index 000000000000..e4bb6f258673 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/S3RepositoryConfiguration.java @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration; + +import com.github.joschi.jadconfig.Parameter; +import com.github.joschi.jadconfig.converters.BooleanConverter; +import org.apache.commons.lang3.StringUtils; +import org.graylog2.configuration.Documentation; + +import java.util.Arrays; +import java.util.Map; + +public class S3RepositoryConfiguration { + + @Documentation("S3 repository access key for searchable snapshots") + @Parameter(value = "s3_client_default_access_key") + private String s3ClientDefaultAccessKey; + + @Documentation("S3 repository secret key for searchable snapshots") + @Parameter(value = "s3_client_default_secret_key") + private String s3ClientDefaultSecretKey; + + @Documentation("S3 repository protocol for searchable snapshots") + @Parameter(value = "s3_client_default_protocol") + private String s3ClientDefaultProtocol = "http"; + + @Documentation("S3 repository endpoint for searchable snapshots") + @Parameter(value = "s3_client_default_endpoint") + private String s3ClientDefaultEndpoint; + + @Documentation("S3 repository region for searchable snapshots") + @Parameter(value = "s3_client_default_region") + private String s3ClientDefaultRegion = "us-east-2"; + + @Documentation("S3 repository path-style access for searchable snapshots") + @Parameter(value = "s3_client_default_path_style_access", converter = BooleanConverter.class) + private boolean s3ClientDefaultPathStyleAccess = true; + + + /** + * access and secret keys are handled separately and stored in an opensearch keystore. + * See usages of {@link #getS3ClientDefaultAccessKey()} and {@link #getS3ClientDefaultSecretKey()} + */ + public Map toOpensearchProperties() { + return Map.of( + "s3.client.default.protocol", s3ClientDefaultProtocol, + "s3.client.default.endpoint", s3ClientDefaultEndpoint, + "s3.client.default.region", s3ClientDefaultRegion, + "s3.client.default.path_style_access", String.valueOf(s3ClientDefaultPathStyleAccess) + ); + } + + public String getS3ClientDefaultAccessKey() { + return s3ClientDefaultAccessKey; + } + + public String getS3ClientDefaultSecretKey() { + return s3ClientDefaultSecretKey; + } + + /** + * Verify that either both access and secret keys and the endpoint are configured or none of them. Partial configuration + * will lead to an IllegalStateException. + */ + public boolean isRepositoryEnabled() { + if (noneBlank(s3ClientDefaultEndpoint, s3ClientDefaultAccessKey, s3ClientDefaultSecretKey)) { + // All the required properties are set and not blank, s3 repository is enabled + return true; + } else if (allBlank(s3ClientDefaultEndpoint, s3ClientDefaultAccessKey, s3ClientDefaultSecretKey)) { + // all are empty, this means repository is not configured at all + return false; + } else { + // One or two properties are configured, this is an incomplete configuration we can't handle this situation + throw new IllegalStateException(""" + S3 Client not configured properly, all + s3_client_default_access_key, s3_client_default_secret_key and s3_client_default_endpoint + have to be provided in the configuration!"""); + } + } + + private boolean noneBlank(String... properties) { + return Arrays.stream(properties).noneMatch(StringUtils::isBlank); + } + + private boolean allBlank(String... 
properties) { + return Arrays.stream(properties).allMatch(StringUtils::isBlank); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/variants/DatanodeKeystoreOpensearchCertificatesProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/variants/DatanodeKeystoreOpensearchCertificatesProvider.java new file mode 100644 index 000000000000..2736bbfb80d3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/variants/DatanodeKeystoreOpensearchCertificatesProvider.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration.variants; + +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.DatanodeKeystoreException; +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.security.certutil.csr.InMemoryKeystoreInformation; + +public final class DatanodeKeystoreOpensearchCertificatesProvider implements OpensearchCertificatesProvider { + private final DatanodeKeystore datanodeKeystore; + + @Inject + public DatanodeKeystoreOpensearchCertificatesProvider(final DatanodeKeystore datanodeKeystore) { + this.datanodeKeystore = datanodeKeystore; + } + + @Override + public boolean isConfigured(Configuration localConfiguration) { + try { + return datanodeKeystore.exists() && datanodeKeystore.hasSignedCertificate(); + } catch (DatanodeKeystoreException e) { + throw new OpensearchConfigurationException(e); + } + } + + @Override + public OpensearchCertificates build() { + try { + final InMemoryKeystoreInformation safeCopy = this.datanodeKeystore.getSafeCopy(); + return new OpensearchCertificates(safeCopy, safeCopy); + } catch (DatanodeKeystoreException e) { + throw new OpensearchConfigurationException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/variants/LocalConfigurationCertificatesProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/variants/LocalConfigurationCertificatesProvider.java new file mode 100644 index 000000000000..25772a8b1ee0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/variants/LocalConfigurationCertificatesProvider.java @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
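A self-contained sketch (not part of this changeset) of the all-or-nothing rule that isRepositoryEnabled() above enforces; it mirrors the noneBlank/allBlank checks rather than calling the configuration bean directly.

import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;

class S3ConfigRuleExample {
    static boolean repositoryEnabled(String endpoint, String accessKey, String secretKey) {
        String[] props = {endpoint, accessKey, secretKey};
        if (Arrays.stream(props).noneMatch(StringUtils::isBlank)) {
            return true;  // fully configured, repository enabled
        }
        if (Arrays.stream(props).allMatch(StringUtils::isBlank)) {
            return false; // not configured at all
        }
        throw new IllegalStateException("S3 client not configured properly, all three settings are required");
    }

    public static void main(String[] args) {
        System.out.println(repositoryEnabled("http://example-minio:9000", "access", "secret")); // true
        System.out.println(repositoryEnabled("", "", ""));                                      // false
        // repositoryEnabled("http://example-minio:9000", "access", "") would throw IllegalStateException
    }
}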
+ */ +package org.graylog.datanode.configuration.variants; + +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.security.certutil.csr.FilesystemKeystoreInformation; +import org.graylog.security.certutil.csr.InMemoryKeystoreInformation; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.graylog.security.certutil.keystore.storage.KeystoreUtils; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; + +import static org.graylog.datanode.Configuration.HTTP_CERTIFICATE_PASSWORD_PROPERTY; +import static org.graylog.datanode.Configuration.TRANSPORT_CERTIFICATE_PASSWORD_PROPERTY; + +public final class LocalConfigurationCertificatesProvider implements OpensearchCertificatesProvider { + + private final String tranportCertificateFile; + private final String httpCertificateFile; + private final String transportCertificatePassword; + private final String httpCertificatePassword; + private final DatanodeConfiguration datanodeConfiguration; + private final String transportCertificateAlias; + private final String httpCertificateAlias; + + @Inject + public LocalConfigurationCertificatesProvider(final Configuration localConfiguration, + final DatanodeConfiguration datanodeConfiguration) { + this.datanodeConfiguration = datanodeConfiguration; + + this.tranportCertificateFile = localConfiguration.getDatanodeTransportCertificate(); + this.transportCertificatePassword = localConfiguration.getDatanodeTransportCertificatePassword(); + this.transportCertificateAlias = localConfiguration.getDatanodeTransportCertificateAlias(); + + this.httpCertificateFile = localConfiguration.getDatanodeHttpCertificate(); + this.httpCertificatePassword = localConfiguration.getDatanodeHttpCertificatePassword(); + this.httpCertificateAlias = localConfiguration.getDatanodeHttpCertificateAlias(); + } + + @Override + public boolean isConfigured(Configuration localConfiguration) throws OpensearchConfigurationException { + + if (noneOfRequiredConfigOptionsProvided()) { + return false; // none of the uploaded cert options is provided => not usable for this security config, skip this config + } + + List errors = new LinkedList<>(); + + if (isBlank(transportCertificatePassword)) { + errors.add(TRANSPORT_CERTIFICATE_PASSWORD_PROPERTY + " required. Please configure password to your transport certificates keystore."); + } + + if (!fileExists(tranportCertificateFile)) { + errors.add("transport_certificate required. Please provide a path to a certificate file in your configuration."); + } + + if (isBlank(httpCertificatePassword)) { + errors.add(HTTP_CERTIFICATE_PASSWORD_PROPERTY + " required. Please configure password to your http certificates keystore."); + } + + if (!fileExists(httpCertificateFile)) { + errors.add("http_certificate required. 
Please provide a path to a certificate file in your configuration."); + } + + if (!errors.isEmpty()) { + throw new OpensearchConfigurationException("Configuration incomplete, check the following settings: " + String.join(", ", errors)); + } + + return true; + } + + private boolean isBlank(String value) { + return value == null || value.isBlank(); + } + + private boolean fileExists(String filename) { + return Optional.ofNullable(filename) + .flatMap(fileName -> datanodeConfiguration.datanodeDirectories().resolveConfigurationSourceFile(filename)) + .map(Files::exists) + .orElse(false); + } + + /** + * We require either full set of http and transport certificates and their keys or nothing. Anything in-between will + * lead to an exception, it's a mismatched configuration and would cause problems in the future. + */ + private boolean noneOfRequiredConfigOptionsProvided() { + return isBlank(transportCertificatePassword) && + isBlank(httpCertificatePassword) && + isBlank(httpCertificateFile) && + isBlank(tranportCertificateFile); + } + + @Override + public OpensearchCertificates build() { + + final Path transportCertPath = datanodeConfiguration.datanodeDirectories().resolveConfigurationSourceFile(tranportCertificateFile).orElseThrow(() -> new RuntimeException("This should not happen, certificate expected")); + final InMemoryKeystoreInformation transportKeystore = reencrypt(new FilesystemKeystoreInformation(transportCertPath, transportCertificatePassword.toCharArray())); + + final Path httpCertPath = datanodeConfiguration.datanodeDirectories().resolveConfigurationSourceFile(httpCertificateFile).orElseThrow(() -> new RuntimeException("This should not happen, certificate expected")); + final InMemoryKeystoreInformation httpKeystore = reencrypt(new FilesystemKeystoreInformation(httpCertPath, httpCertificatePassword.toCharArray())); + + return new OpensearchCertificates(transportKeystore, transportCertificateAlias, httpKeystore, httpCertificateAlias); + } + + @Nonnull + private static InMemoryKeystoreInformation reencrypt(KeystoreInformation keystoreInformation) { + try { + final char[] oneTimePassword = RandomStringUtils.randomAlphabetic(256).toCharArray(); + final KeyStore reencrypted = KeystoreUtils.newStoreCopyContent(keystoreInformation.loadKeystore(), keystoreInformation.password(), oneTimePassword); + return new InMemoryKeystoreInformation(reencrypted, oneTimePassword); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/variants/NoOpensearchCertificatesProvider.java b/data-node/src/main/java/org/graylog/datanode/configuration/variants/NoOpensearchCertificatesProvider.java new file mode 100644 index 000000000000..2aacfbde2cfc --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/variants/NoOpensearchCertificatesProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration.variants; + +import org.graylog.datanode.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Deprecated +public class NoOpensearchCertificatesProvider implements OpensearchCertificatesProvider { + + private static final Logger LOG = LoggerFactory.getLogger(NoOpensearchCertificatesProvider.class); + + @Override + public boolean isConfigured(final Configuration localConfiguration) { + return localConfiguration.isInsecureStartup(); + } + + @Override + public OpensearchCertificates build() { + LOG.warn("Insecure configuration is deprecated. Please use selfsigned_startup to create fully encrypted setups."); + return OpensearchCertificates.none(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/variants/OpensearchCertificates.java b/data-node/src/main/java/org/graylog/datanode/configuration/variants/OpensearchCertificates.java new file mode 100644 index 000000000000..a97fe2eed416 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/variants/OpensearchCertificates.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration.variants; + +import jakarta.annotation.Nullable; +import org.graylog.security.certutil.csr.KeystoreInformation; + +public class OpensearchCertificates { + + @Nullable + private final KeystoreInformation transportCertificate; + @Nullable + private final String transportKeyAlias; + @Nullable + private final KeystoreInformation httpCertificate; + @Nullable + private final String httpKeyAlias; + + public OpensearchCertificates(@Nullable KeystoreInformation transportCertificate, @Nullable String transportKeyAlias, @Nullable KeystoreInformation httpCertificate, @Nullable String httpKeyAlias) { + this.transportCertificate = transportCertificate; + this.transportKeyAlias = transportKeyAlias; + this.httpCertificate = httpCertificate; + this.httpKeyAlias = httpKeyAlias; + } + + public OpensearchCertificates(KeystoreInformation transportCertificate, KeystoreInformation httpCertificate) { + // null aliases mean autodetection - first alias will be used + this(transportCertificate, null, httpCertificate, null); + } + + public static OpensearchCertificates none() { + return new OpensearchCertificates(null, null, null, null); + } + + @Nullable + public KeystoreInformation getTransportCertificate() { + return transportCertificate; + } + + @Nullable + public KeystoreInformation getHttpCertificate() { + return httpCertificate; + } + + @Nullable + public String getTransportKeyAlias() { + return transportKeyAlias; + } + + @Nullable + public String getHttpKeyAlias() { + return httpKeyAlias; + } + + public boolean hasBothCertificates() { + return getHttpCertificate() != null && getTransportCertificate() != null; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/configuration/variants/OpensearchCertificatesProvider.java 
b/data-node/src/main/java/org/graylog/datanode/configuration/variants/OpensearchCertificatesProvider.java new file mode 100644 index 000000000000..61169d69d4a0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/configuration/variants/OpensearchCertificatesProvider.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration.variants; + +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.OpensearchConfigurationException; + +public interface OpensearchCertificatesProvider { + + boolean isConfigured(final Configuration localConfiguration) throws OpensearchConfigurationException; + + OpensearchCertificates build(); +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/IncompatibleIndexVersionException.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/IncompatibleIndexVersionException.java new file mode 100644 index 000000000000..b83582ae0225 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/IncompatibleIndexVersionException.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index; + +public class IncompatibleIndexVersionException extends RuntimeException { + public IncompatibleIndexVersionException(Throwable cause) { + super(cause); + } + + public IncompatibleIndexVersionException(String message) { + super(message); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/IndexerInformationParserException.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/IndexerInformationParserException.java new file mode 100644 index 000000000000..964b3671ef78 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/IndexerInformationParserException.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index; + +public class IndexerInformationParserException extends RuntimeException { + public IndexerInformationParserException(String message) { + super(message); + } + + public IndexerInformationParserException(String message, Exception cause) { + super(message, cause); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/IndicesDirectoryParser.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/IndicesDirectoryParser.java new file mode 100644 index 000000000000..76159ea4c2e6 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/IndicesDirectoryParser.java @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import org.graylog.datanode.filesystem.index.dto.IndexInformation; +import org.graylog.datanode.filesystem.index.dto.IndexerDirectoryInformation; +import org.graylog.datanode.filesystem.index.dto.NodeInformation; +import org.graylog.datanode.filesystem.index.dto.ShardInformation; +import org.graylog.datanode.filesystem.index.indexreader.ShardStats; +import org.graylog.datanode.filesystem.index.indexreader.ShardStatsParser; +import org.graylog.datanode.filesystem.index.statefile.StateFile; +import org.graylog.datanode.filesystem.index.statefile.StateFileParser; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +@Singleton +public class IndicesDirectoryParser { + + public static final String STATE_DIR_NAME = "_state"; + public static final String STATE_FILE_EXTENSION = ".st"; + + private final StateFileParser stateFileParser; + private final ShardStatsParser shardReader; + + @Inject + public IndicesDirectoryParser(StateFileParser stateFileParser, ShardStatsParser shardReader) { + this.stateFileParser = stateFileParser; + this.shardReader = shardReader; + } + + public IndexerDirectoryInformation parse(Path path) { + if (!Files.exists(path)) { + throw new IndexerInformationParserException("Path " + path + " does not exist."); + } + + if (!Files.isDirectory(path)) { + throw new IndexerInformationParserException("Path " + path + " is not a directory"); + } + + if (!Files.isReadable(path)) { + throw new IndexerInformationParserException("Path " + path + " is not readable"); + } + + final Path nodesPath = path.resolve("nodes"); + + if (!Files.exists(nodesPath)) { + return IndexerDirectoryInformation.empty(path); + } + + try (final Stream nodes = Files.list(nodesPath)) { + final List nodeInformation = nodes.filter(Files::isDirectory) + .filter(p -> p.getFileName().toString().matches("\\d+")) + .map(this::parseNode) + 
.filter(node -> !node.isEmpty()) + .toList(); + return new IndexerDirectoryInformation(path, nodeInformation); + } catch (IOException e) { + throw new IndexerInformationParserException("Failed to list nodes", e); + } + } + + private NodeInformation parseNode(Path nodePath) { + final Path indicesDir = nodePath.resolve("indices"); + if (!Files.exists(indicesDir)) { + return NodeInformation.empty(nodePath); + } + try (Stream<Path> indicesDirs = Files.list(indicesDir)) { + final StateFile state = getState(nodePath, "node"); + final List<IndexInformation> indices = indicesDirs + .map(this::parseIndex) + .sorted(Comparator.comparing(IndexInformation::indexName)) + .collect(Collectors.toList()); + return new NodeInformation(nodePath, indices, state); + } catch (IOException e) { + throw new IndexerInformationParserException("Failed to list indices directories", e); + } + } + + private StateFile getState(Path path, String stateFilePrefix) { + final Path stateFile = findStateFile(path, stateFilePrefix); + return stateFileParser.parse(stateFile); + } + + private IndexInformation parseIndex(Path path) { + final String indexID = path.getFileName().toString(); + final StateFile state = getState(path, "state"); + try (Stream<Path> shardDirs = Files.list(path)) { + final List<ShardInformation> shards = shardDirs + .filter(Files::isDirectory) + .filter(p -> p.getFileName().toString().matches("\\d+")) + .filter(p -> Files.exists(p.resolve("index"))) + .map(this::getShardInformation) + .sorted(Comparator.comparing(ShardInformation::name)) + .collect(Collectors.toList()); + return new IndexInformation(path, indexID, state, shards); + } catch (IOException e) { + throw new IndexerInformationParserException("Failed to parse shard information", e); + } + } + + private ShardInformation getShardInformation(Path path) { + final ShardStats shardStats = shardReader.read(path); + final StateFile state = getState(path, "state"); + return new ShardInformation(path, shardStats.documentsCount(), state, shardStats.minSegmentLuceneVersion()); + } + + private Path findStateFile(Path stateDir, String stateFilePrefix) { + try (Stream<Path> stateFiles = Files.list(stateDir.resolve(STATE_DIR_NAME))) { + return stateFiles + .filter(Files::isRegularFile) + .filter(file -> file.getFileName().toString().startsWith(stateFilePrefix)) + .filter(file -> file.getFileName().toString().endsWith(STATE_FILE_EXTENSION)) + .findFirst() + .orElseThrow(() -> new IndexerInformationParserException("No state file available in dir " + stateDir)); + } catch (IOException e) { + throw new IndexerInformationParserException("Failed to list state file of index " + stateDir, e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/IndexInformation.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/IndexInformation.java new file mode 100644 index 000000000000..cd8da71bd560 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/IndexInformation.java @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details.
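A hypothetical usage sketch (not part of this changeset) of the parser above; the data directory path is only an example, and the parser instance is assumed to be injected.

import java.nio.file.Path;
import org.graylog.datanode.filesystem.index.IndicesDirectoryParser;
import org.graylog.datanode.filesystem.index.dto.IndexerDirectoryInformation;

class IndicesDirectoryParserExample {
    static void printIndices(IndicesDirectoryParser parser) {
        // walks <data>/nodes/<n>/indices/<uuid>/<shard>/_state and collects versions per index
        IndexerDirectoryInformation info = parser.parse(Path.of("/var/lib/graylog-datanode/opensearch/data"));
        info.nodes().forEach(node -> node.indices().forEach(System.out::println));
    }
}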
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.dto; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.graylog.datanode.filesystem.index.statefile.StateFile; +import org.graylog.shaded.opensearch2.org.opensearch.Version; + +import java.nio.file.Path; +import java.time.Instant; +import java.time.ZoneId; +import java.util.List; +import java.util.Map; + +public record IndexInformation(@JsonIgnore Path path, String indexID, @JsonIgnore StateFile stateFile, + List<ShardInformation> shards) { + + @JsonProperty + public String indexName() { + return stateFile.document().keySet().stream().findFirst().orElseThrow(() -> new RuntimeException("Failed to read index name")); + } + + @JsonProperty + public String indexVersionCreated() { + final int versionValue = Integer.parseInt(indexSetting("index.version.created")); + return Version.fromId(versionValue).toString(); + } + + @JsonProperty + public String creationDate() { + + final long timestamp = Long.parseLong(indexSetting("index.creation_date")); + return Instant.ofEpochMilli(timestamp).atZone(ZoneId.systemDefault()).toLocalDateTime().toString(); + } + + private String indexSetting(String setting) { + final Map<String, Object> index = (Map<String, Object>) stateFile.document().get(indexName()); + Map<String, Object> settings = (Map<String, Object>) index.get("settings"); + return (String) settings.get(setting); + } + + @Override + public String toString() { + return "{" + + "indexID='" + indexID + '\'' + + ", indexName='" + indexName() + '\'' + + ", created='" + creationDate() + '\'' + + ", version='" + indexVersionCreated() + '\'' + + '}'; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/IndexerDirectoryInformation.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/IndexerDirectoryInformation.java new file mode 100644 index 000000000000..991831aab9e3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/IndexerDirectoryInformation.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.dto; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; + +public record IndexerDirectoryInformation(@JsonIgnore Path path, List<NodeInformation> nodes) { + public static IndexerDirectoryInformation empty(Path path) { + return new IndexerDirectoryInformation(path, Collections.emptyList()); + } + + /** + * The property name matches the configuration property name to minimize confusion. + * Jackson serializes paths with a file:/ prefix by default, so we convert the path to a string + * here ourselves to deliver only the real path value.
+ */ + @JsonProperty("opensearch_data_location") + public String baseDir() { + return path.toString(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/NodeInformation.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/NodeInformation.java new file mode 100644 index 000000000000..9f901de8c794 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/NodeInformation.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.dto; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.graylog.datanode.filesystem.index.statefile.StateFile; +import org.graylog.shaded.opensearch2.org.opensearch.Version; + +import javax.annotation.Nullable; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +public record NodeInformation(@JsonIgnore java.nio.file.Path nodePath, List indices, + @JsonIgnore @Nullable StateFile stateFile) { + public static NodeInformation empty(Path nodePath) { + return new NodeInformation(nodePath, Collections.emptyList(), null); + } + + public boolean isEmpty() { + return indices.isEmpty(); + } + + @JsonProperty + public String nodeVersion() { + return Optional.ofNullable(stateFile).map(sf -> (Integer) sf.document().get("node_version")) + .map(Version::fromId) + .map(Version::toString) + .orElseGet(this::parseFromIndices); + } + + private String parseFromIndices() { + return indices.stream().map(IndexInformation::indexVersionCreated).distinct().findFirst().orElse(null); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/ShardInformation.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/ShardInformation.java new file mode 100644 index 000000000000..f14d16fdada9 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/dto/ShardInformation.java @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.filesystem.index.dto; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.lucene.util.Version; +import org.graylog.datanode.filesystem.index.statefile.StateFile; + +import java.util.Optional; +@JsonInclude(JsonInclude.Include.NON_NULL) +public record ShardInformation(@JsonIgnore java.nio.file.Path path, int documentsCount, @JsonIgnore StateFile stateFile, + @JsonIgnore Version minSegmentLuceneVersion) { + + @JsonProperty + public String name() { + return "S" + path.getFileName().toString(); + } + + @JsonProperty + public String minLuceneVersion() { + return Optional.ofNullable(minSegmentLuceneVersion).map(Version::toString).orElse(null); + } + + @JsonProperty + public boolean primary() { + return (boolean) stateFile.document().get("primary"); + } + + @Override + public String toString() { + return "{" + + "name=" + name() + + ", documentsCount=" + documentsCount + + ", minSegmentLuceneVersion=" + minSegmentLuceneVersion + + '}'; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStats.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStats.java new file mode 100644 index 000000000000..53e00e9e3732 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStats.java @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.indexreader; + +import org.apache.lucene.util.Version; + +public record ShardStats(java.nio.file.Path path, int documentsCount, + Version minSegmentLuceneVersion) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParser.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParser.java new file mode 100644 index 000000000000..0aadac2e7d61 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParser.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.filesystem.index.indexreader; + +import org.graylog.datanode.filesystem.index.IncompatibleIndexVersionException; + +import java.nio.file.Path; + +public interface ShardStatsParser { + ShardStats read(Path path) throws IncompatibleIndexVersionException; +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParserImpl.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParserImpl.java new file mode 100644 index 000000000000..77999cd14ace --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParserImpl.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.indexreader; + +import jakarta.inject.Singleton; +import org.apache.lucene.index.IndexFormatTooOldException; +import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.util.Version; +import org.graylog.datanode.filesystem.index.IncompatibleIndexVersionException; +import org.graylog.datanode.filesystem.index.IndexerInformationParserException; + +import java.io.IOException; +import java.nio.file.Path; + +@Singleton +public class ShardStatsParserImpl implements ShardStatsParser { + @Override + public ShardStats read(Path shardPath) throws IncompatibleIndexVersionException { + try (Directory directory = FSDirectory.open(shardPath.resolve("index"))) { + final StandardDirectoryReader reader = (StandardDirectoryReader) org.apache.lucene.index.DirectoryReader.open(directory); + final int documentsCount = getDocumentsCount(reader); + final Version minSegmentLuceneVersion = reader.getSegmentInfos().getMinSegmentLuceneVersion(); + return new ShardStats(shardPath, documentsCount, minSegmentLuceneVersion); + } catch (IndexFormatTooOldException e) { + throw new IncompatibleIndexVersionException(e); + } catch (IOException e) { + throw new IndexerInformationParserException("Failed to open index for read", e); + } + } + + private int getDocumentsCount(StandardDirectoryReader reader) { + // use IndexSearcher if you want to count documents smarter, filtering by field or query + // IndexSearcher searcher = new IndexSearcher(reader); + return reader.numDocs(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFile.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFile.java new file mode 100644 index 000000000000..35615038a706 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFile.java @@ -0,0 +1,20 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.statefile; + +public record StateFile(java.nio.file.Path file, java.util.Map document) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFileParser.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFileParser.java new file mode 100644 index 000000000000..9c5b7ca2d4cf --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFileParser.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index.statefile; + +import org.graylog.datanode.filesystem.index.IndexerInformationParserException; + +import java.nio.file.Path; + +public interface StateFileParser { + StateFile parse(Path file) throws IndexerInformationParserException; +} diff --git a/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFileParserImpl.java b/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFileParserImpl.java new file mode 100644 index 000000000000..4e37dc24d1bb --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/filesystem/index/statefile/StateFileParserImpl.java @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.filesystem.index.statefile; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.smile.SmileFactory; +import com.fasterxml.jackson.dataformat.smile.SmileGenerator; +import jakarta.inject.Singleton; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.graylog.datanode.filesystem.index.IndexerInformationParserException; +import org.graylog2.jackson.TypeReferences; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.util.Map; + +@Singleton +public class StateFileParserImpl implements StateFileParser { + + private static final String STATE_FILE_CODEC = "state"; + private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 1; + private static final int STATE_FILE_VERSION = 1; + + private final ObjectMapper objectMapper; + + public StateFileParserImpl() { + this.objectMapper = new ObjectMapper(createSmileFactory()); + } + + private SmileFactory createSmileFactory() { + final SmileFactory factory = new SmileFactory(); + // for now, this is an overhead, might make sense for web sockets + factory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); + factory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now... + // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method + factory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); + factory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); + factory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(50000000).build()); + factory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); + return factory; + } + + @Override + public StateFile parse(Path file) throws IndexerInformationParserException { + try { + return parseStateFile(file); + } catch (IOException e) { + throw new IndexerInformationParserException("Failed to parse state file", e); + } + } + + private StateFile parseStateFile(Path file) throws IOException { + final Path dir = file.getParent(); + final String filename = file.getFileName().toString(); + final FSDirectory directory = FSDirectory.open(dir); + IndexInput indexInput = EndiannessReverserUtil.openInput(directory, filename, IOContext.DEFAULT); + // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. 
+ CodecUtil.checksumEntireFile(indexInput); + CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION); + final int xcontentTypeValue = indexInput.readInt(); + long filePointer = indexInput.getFilePointer(); + long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; + try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) { + final InputStreamIndexInput input = new InputStreamIndexInput(slice, contentSize); + final Map readValue = objectMapper.readValue(input, TypeReferences.MAP_STRING_OBJECT); + return new StateFile(file, readValue); + } + } + + /** + * Lucene FSDirectory.open cannot be used from shaded classes anymore (>v12) as it checks that it is used only internally. + * Therefore, we need to clone OS's InputStreamIndexInput here to be able to use lucene's InputStream. + */ + static class InputStreamIndexInput extends InputStream { + private final IndexInput indexInput; + private final long limit; + private final long actualSizeToRead; + private long counter = 0L; + private long markPointer; + private long markCounter; + + public InputStreamIndexInput(IndexInput indexInput, long limit) { + this.indexInput = indexInput; + this.limit = limit; + if (indexInput.length() - indexInput.getFilePointer() > limit) { + this.actualSizeToRead = limit; + } else { + this.actualSizeToRead = indexInput.length() - indexInput.getFilePointer(); + } + + } + + public long actualSizeToRead() { + return this.actualSizeToRead; + } + + public int read(byte[] b, int off, int len) throws IOException { + if (b == null) { + throw new NullPointerException(); + } else if (off >= 0 && len >= 0 && len <= b.length - off) { + if (this.indexInput.getFilePointer() >= this.indexInput.length()) { + return -1; + } else { + if (this.indexInput.getFilePointer() + (long) len > this.indexInput.length()) { + len = (int) (this.indexInput.length() - this.indexInput.getFilePointer()); + } + + if (this.counter + (long) len > this.limit) { + len = (int) (this.limit - this.counter); + } + + if (len <= 0) { + return -1; + } else { + this.indexInput.readBytes(b, off, len, false); + this.counter += len; + return len; + } + } + } else { + throw new IndexOutOfBoundsException(); + } + } + + public int read() throws IOException { + if (this.counter++ >= this.limit) { + return -1; + } else { + return this.indexInput.getFilePointer() < this.indexInput.length() ? this.indexInput.readByte() & 255 : -1; + } + } + + public boolean markSupported() { + return true; + } + + public synchronized void mark(int readlimit) { + this.markPointer = this.indexInput.getFilePointer(); + this.markCounter = this.counter; + } + + public synchronized void reset() throws IOException { + this.indexInput.seek(this.markPointer); + this.counter = this.markCounter; + } + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/initializers/AuthTokenValidator.java b/data-node/src/main/java/org/graylog/datanode/initializers/AuthTokenValidator.java new file mode 100644 index 000000000000..99b950c7cee4 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/initializers/AuthTokenValidator.java @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.initializers; + +public interface AuthTokenValidator { + void verifyToken(String token) throws TokenVerificationException; +} diff --git a/data-node/src/main/java/org/graylog/datanode/initializers/JerseyService.java b/data-node/src/main/java/org/graylog/datanode/initializers/JerseyService.java new file mode 100644 index 000000000000..fe25ebf6b5fa --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/initializers/JerseyService.java @@ -0,0 +1,293 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.initializers; + +import com.codahale.metrics.InstrumentedExecutorService; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jakarta.rs.base.JsonMappingExceptionMapper; +import com.fasterxml.jackson.jakarta.rs.json.JacksonXmlBindJsonProvider; +import com.google.common.collect.ImmutableMap; +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import com.google.common.net.HostAndPort; +import com.google.common.util.concurrent.AbstractIdleService; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.ws.rs.container.DynamicFeature; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.ext.ContextResolver; +import jakarta.ws.rs.ext.ExceptionMapper; +import org.glassfish.grizzly.http.CompressionConfig; +import org.glassfish.grizzly.http.server.HttpServer; +import org.glassfish.grizzly.http.server.NetworkListener; +import org.glassfish.grizzly.ssl.SSLContextConfigurator; +import org.glassfish.grizzly.ssl.SSLEngineConfigurator; +import org.glassfish.jersey.grizzly2.httpserver.GrizzlyHttpServerFactory; +import org.glassfish.jersey.media.multipart.MultiPartFeature; +import org.glassfish.jersey.server.ResourceConfig; +import org.glassfish.jersey.server.ServerProperties; +import org.glassfish.jersey.server.model.Resource; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.opensearch.OpensearchConfigurationChangeEvent; +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.datanode.rest.config.SecuredNodeAnnotationFilter; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.graylog2.configuration.TLSProtocolsConfiguration; +import org.graylog2.plugin.inject.Graylog2Module; +import org.graylog2.rest.MoreMediaTypes; +import org.graylog2.security.JwtSecretProvider; +import 
org.graylog2.shared.rest.exceptionmappers.JsonProcessingExceptionMapper; +import org.graylog2.shared.security.tls.KeyStoreUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.ssl.SSLContext; +import java.net.URI; +import java.security.KeyStore; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; + +import static com.codahale.metrics.MetricRegistry.name; +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Strings.isNullOrEmpty; +import static java.util.Objects.requireNonNull; + +public class JerseyService extends AbstractIdleService { + private static final Logger LOG = LoggerFactory.getLogger(JerseyService.class); + private static final String RESOURCE_PACKAGE_WEB = "org.graylog2.web.resources"; + + private final Configuration configuration; + private final Set> systemRestResources; + + private final Set> dynamicFeatures; + private final Set> exceptionMappers; + private final ObjectMapper objectMapper; + private final MetricRegistry metricRegistry; + private final TLSProtocolsConfiguration tlsConfiguration; + + private HttpServer apiHttpServer = null; + private final ExecutorService executorService; + + private final JwtSecretProvider jwtSecretProvider; + + @Inject + public JerseyService(final Configuration configuration, + Set> dynamicFeatures, + Set> exceptionMappers, + @Named(Graylog2Module.SYSTEM_REST_RESOURCES) final Set> systemRestResources, + ObjectMapper objectMapper, + MetricRegistry metricRegistry, + TLSProtocolsConfiguration tlsConfiguration, EventBus eventBus, JwtSecretProvider jwtSecretProvider) { + this.configuration = requireNonNull(configuration, "configuration"); + this.dynamicFeatures = requireNonNull(dynamicFeatures, "dynamicFeatures"); + this.exceptionMappers = requireNonNull(exceptionMappers, "exceptionMappers"); + this.systemRestResources = systemRestResources; + this.objectMapper = requireNonNull(objectMapper, "objectMapper"); + this.metricRegistry = requireNonNull(metricRegistry, "metricRegistry"); + this.tlsConfiguration = requireNonNull(tlsConfiguration); + this.jwtSecretProvider = jwtSecretProvider; + eventBus.register(this); + this.executorService = instrumentedExecutor( + "http-worker-executor", + "http-worker-%d", + configuration.getHttpThreadPoolSize()); + } + + @Subscribe + public synchronized void handleOpensearchConfigurationChange(OpensearchConfigurationChangeEvent event) throws Exception { + if (apiHttpServer == null) { + // this is the very first start of the jersey service + LOG.info("Starting Data node REST API"); + } else { + // jersey service has been running for some time, now we received new configuration. 
We'll reboot the service + LOG.info("Server configuration changed, restarting Data node REST API to apply security changes"); + } + shutDown(); + doStartup(extractSslConfiguration(event.config())); + } + + private SSLEngineConfigurator extractSslConfiguration(OpensearchConfiguration config) { + return config.httpCertificate() + .map(this::buildSslEngineConfigurator) + .orElse(null); + } + + @Override + protected void startUp() { + // do nothing, the actual startup will be triggered at the moment opensearch configuration is available + } + + private void doStartup(SSLEngineConfigurator sslEngineConfigurator) throws Exception { + // we need to work around the change introduced in https://github.com/GrizzlyNIO/grizzly-mirror/commit/ba9beb2d137e708e00caf7c22603532f753ec850 + // because the PooledMemoryManager which is default now uses 10% of the heap no matter what + System.setProperty("org.glassfish.grizzly.DEFAULT_MEMORY_MANAGER", "org.glassfish.grizzly.memory.HeapMemoryManager"); + startUpApi(sslEngineConfigurator); + } + + @Override + protected void shutDown() { + shutdownHttpServer(apiHttpServer, HostAndPort.fromParts(configuration.getBindAddress(), configuration.getDatanodeHttpPort())); + } + + private void shutdownHttpServer(HttpServer httpServer, HostAndPort bindAddress) { + if (httpServer != null && httpServer.isStarted()) { + LOG.info("Shutting down HTTP listener at <{}>", bindAddress); + httpServer.shutdownNow(); + } + } + + private void startUpApi(SSLEngineConfigurator sslEngineConfigurator) throws Exception { + final String contextPath = configuration.getHttpPublishUri().getPath(); + final URI listenUri = new URI( + configuration.getUriScheme(), + null, + configuration.getBindAddress(), + configuration.getDatanodeHttpPort(), + isNullOrEmpty(contextPath) ? 
"/" : contextPath, + null, + null + ); + + apiHttpServer = setUp( + listenUri, + sslEngineConfigurator, + configuration.getHttpSelectorRunnersCount(), + configuration.getHttpMaxHeaderSize(), + configuration.isHttpEnableGzip(), + Set.of()); + + apiHttpServer.start(); + + LOG.info("Started REST API at <{}:{}>", configuration.getBindAddress(), configuration.getDatanodeHttpPort()); + } + + private ResourceConfig buildResourceConfig(final Set additionalResources) { + final ResourceConfig rc = new ResourceConfig() + .property(ServerProperties.BV_SEND_ERROR_IN_RESPONSE, true) + .property(ServerProperties.WADL_FEATURE_DISABLE, true) + .property(ServerProperties.MEDIA_TYPE_MAPPINGS, mediaTypeMappings()) + .registerClasses( + JacksonXmlBindJsonProvider.class, + JsonProcessingExceptionMapper.class, + JsonMappingExceptionMapper.class) + // Replacing this with a lambda leads to missing subtypes - https://github.com/Graylog2/graylog2-server/pull/10617#discussion_r630236360 + .register(new ContextResolver() { + @Override + public ObjectMapper getContext(Class type) { + return objectMapper; + } + }) + .register(MultiPartFeature.class) + .registerClasses(systemRestResources) + .registerResources(additionalResources); + + exceptionMappers.forEach(rc::registerClasses); + dynamicFeatures.forEach(rc::registerClasses); + + return rc; + } + + private Map mediaTypeMappings() { + return ImmutableMap.of( + "json", MediaType.APPLICATION_JSON_TYPE, + "ndjson", MoreMediaTypes.APPLICATION_NDJSON_TYPE, + "csv", MoreMediaTypes.TEXT_CSV_TYPE, + "log", MoreMediaTypes.TEXT_PLAIN_TYPE + ); + } + + private HttpServer setUp(URI listenUri, + SSLEngineConfigurator sslEngineConfigurator, + int selectorRunnersCount, + int maxHeaderSize, + boolean enableGzip, + Set additionalResources) { + final boolean isSecuredInstance = sslEngineConfigurator != null; + final ResourceConfig resourceConfig = buildResourceConfig(additionalResources); + + if (isSecuredInstance) { + resourceConfig.register(new JwtTokenAuthFilter(jwtSecretProvider.get())); + } + resourceConfig.register(new SecuredNodeAnnotationFilter(configuration.isInsecureStartup())); + + final HttpServer httpServer = GrizzlyHttpServerFactory.createHttpServer( + listenUri, + resourceConfig, + isSecuredInstance, + sslEngineConfigurator, + false); + + final NetworkListener listener = httpServer.getListener("grizzly"); + listener.setMaxHttpHeaderSize(maxHeaderSize); + listener.getTransport().setWorkerThreadPool(executorService); + + // The Grizzly default value is equal to `Runtime.getRuntime().availableProcessors()` which doesn't make + // sense for Graylog because we are not mainly a web server. + // See "Selector runners count" at https://grizzly.java.net/bestpractices.html for details. 
+ listener.getTransport().setSelectorRunnersCount(selectorRunnersCount); + + if (enableGzip) { + final CompressionConfig compressionConfig = listener.getCompressionConfig(); + compressionConfig.setCompressionMode(CompressionConfig.CompressionMode.ON); + compressionConfig.setCompressionMinSize(512); + } + + return httpServer; + } + + private SSLEngineConfigurator buildSslEngineConfigurator(KeystoreInformation keystoreInformation) { + + if (keystoreInformation == null) { + throw new IllegalArgumentException("Unable to read private key"); + } + + final SSLContextConfigurator sslContextConfigurator = new SSLContextConfigurator(); + final char[] password = firstNonNull(keystoreInformation.password(), new char[]{}); + + try { + final KeyStore keyStore = keystoreInformation.loadKeystore(); + sslContextConfigurator.setKeyStorePass(password); + sslContextConfigurator.setKeyStoreBytes(KeyStoreUtils.getBytes(keyStore, password)); + + final SSLContext sslContext = sslContextConfigurator.createSSLContext(true); + final SSLEngineConfigurator sslEngineConfigurator = new SSLEngineConfigurator(sslContext, false, false, false); + sslEngineConfigurator.setEnabledProtocols(tlsConfiguration.getEnabledTlsProtocols().toArray(new String[0])); + return sslEngineConfigurator; + } catch (Exception e) { + throw new RuntimeException("Could not read keystore: " + e.getMessage(), e); + } + } + + private ExecutorService instrumentedExecutor(final String executorName, + final String threadNameFormat, + int poolSize) { + final ThreadFactory threadFactory = new ThreadFactoryBuilder() + .setNameFormat(threadNameFormat) + .setDaemon(true) + .build(); + + return new InstrumentedExecutorService( + Executors.newFixedThreadPool(poolSize, threadFactory), + metricRegistry, + name(JerseyService.class, executorName)); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/initializers/JwtTokenAuthFilter.java b/data-node/src/main/java/org/graylog/datanode/initializers/JwtTokenAuthFilter.java new file mode 100644 index 000000000000..05628da5f69b --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/initializers/JwtTokenAuthFilter.java @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * .
+ */ +package org.graylog.datanode.initializers; + +import io.jsonwebtoken.JwtParser; +import io.jsonwebtoken.Jwts; +import io.jsonwebtoken.UnsupportedJwtException; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerRequestFilter; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.MultivaluedMap; +import jakarta.ws.rs.core.Response; +import org.graylog2.security.JwtSecret; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.crypto.SecretKey; +import java.io.IOException; +import java.util.Collections; +import java.util.Optional; + +@Singleton +public class JwtTokenAuthFilter implements ContainerRequestFilter { + + private static final Logger LOG = LoggerFactory.getLogger(JwtTokenAuthFilter.class); + + private static final String AUTHENTICATION_SCHEME = "Bearer"; + public static final String REQUIRED_SUBJECT = "admin"; + public static final String REQUIRED_ISSUER = "graylog"; + private final JwtSecret jwtSecret; + + @Inject + public JwtTokenAuthFilter(JwtSecret jwtSecret) { + this.jwtSecret = jwtSecret; + } + + @Override + public void filter(ContainerRequestContext requestContext) throws IOException { + final Optional header = getBearerHeader(requestContext); + if (header.isEmpty()) { + // no JWT token, we'll fail immediately + abortRequest(requestContext); + } else { + final String token = header.map(h -> h.replaceFirst(AUTHENTICATION_SCHEME + " ", "")).get(); + try { + verifyToken(token); + } catch (TokenVerificationException e) { + LOG.error("Failed to verify auth token", e); + abortRequest(requestContext); + } + } + } + + private Optional getBearerHeader(ContainerRequestContext requestContext) { + final MultivaluedMap headers = requestContext.getHeaders(); + return headers.getOrDefault(HttpHeaders.AUTHORIZATION, Collections.emptyList()) + .stream() + .filter(a -> a.startsWith(AUTHENTICATION_SCHEME)) + .findFirst(); + } + + void verifyToken(String token) throws TokenVerificationException { + final SecretKey key = this.jwtSecret.getSigningKey(); + final JwtParser parser = Jwts.parser() + .verifyWith(key) + .requireSubject(REQUIRED_SUBJECT) + .requireIssuer(REQUIRED_ISSUER) + .build(); + try { + parser.parse(token); + } catch (UnsupportedJwtException e) { + throw new TokenVerificationException("Token format/configuration is not supported", e); + } catch (Throwable e) { + throw new TokenVerificationException(e); + } + } + + + private void abortRequest(ContainerRequestContext requestContext) { + requestContext.abortWith(Response.status(Response.Status.UNAUTHORIZED) + .entity("Failed to parse auth header") + .type(MediaType.TEXT_PLAIN_TYPE) + .build()); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/initializers/PeriodicalsService.java b/data-node/src/main/java/org/graylog/datanode/initializers/PeriodicalsService.java new file mode 100644 index 000000000000..62303d818e2a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/initializers/PeriodicalsService.java @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.initializers; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; +import com.google.common.util.concurrent.AbstractIdleService; +import org.graylog2.periodical.Periodicals; +import org.graylog2.plugin.periodical.Periodical; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +@Singleton +public class PeriodicalsService extends AbstractIdleService { + private static final Logger LOG = LoggerFactory.getLogger(PeriodicalsService.class); + + private final Periodicals periodicals; + private final Set<Periodical> allPeriodicals; + private final Set<Periodical> leaderNodePeriodicals = new HashSet<>(); + private final Set<Periodical> anyNodePeriodicals = new HashSet<>(); + + @Inject + public PeriodicalsService(Periodicals periodicals, Set<Periodical> allPeriodicals) { + this.periodicals = periodicals; + this.allPeriodicals = allPeriodicals; + + allPeriodicals.forEach(p -> { + if (p.leaderOnly()) { + leaderNodePeriodicals.add(p); + } else { + anyNodePeriodicals.add(p); + } + }); + } + + @Override + protected void startUp() throws Exception { + LOG.info("Starting {} periodicals ...", anyNodePeriodicals.size()); + LOG.info("Delaying start of {} periodicals until this node becomes leader ...", leaderNodePeriodicals.size()); + startPeriodicals(anyNodePeriodicals); + } + + private synchronized void startPeriodicals(Set<Periodical> periodicalsToStart) { + final Sets.SetView<Periodical> notYetStartedPeriodicals = + Sets.difference(periodicalsToStart, ImmutableSet.copyOf(periodicals.getAll())); + + int numOfPeriodicalsToSkip = periodicalsToStart.size() - notYetStartedPeriodicals.size(); + + if (numOfPeriodicalsToSkip > 0) { + LOG.warn("Skipping start of {} periodicals which have already been started.", numOfPeriodicalsToSkip); + } + + for (Periodical periodical : notYetStartedPeriodicals) { + try { + periodical.initialize(); + + if (!periodical.startOnThisNode()) { + LOG.info("Not starting [{}] periodical. Not configured to run on this node.", periodical.getClass().getCanonicalName()); + continue; + } + + // Register and start. + periodicals.registerAndStart(periodical); + } catch (Exception e) { + LOG.error("Could not initialize periodical.", e); + } + } + } + + private synchronized void stopPeriodicals(Collection<Periodical> periodicalsToStop) { + periodicalsToStop.forEach(periodicals::unregisterAndStop); + } + + @Override + protected void shutDown() throws Exception { + stopPeriodicals(periodicals.getAllStoppedOnGracefulShutdown()); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/initializers/TokenVerificationException.java b/data-node/src/main/java/org/graylog/datanode/initializers/TokenVerificationException.java new file mode 100644 index 000000000000..52c812dd0679 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/initializers/TokenVerificationException.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 Graylog, Inc.
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.initializers; + +public class TokenVerificationException extends Exception { + public TokenVerificationException(Throwable cause) { + super(cause); + } + + public TokenVerificationException(String message) { + super(message); + } + + public TokenVerificationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/metrics/ClusterStatMetrics.java b/data-node/src/main/java/org/graylog/datanode/metrics/ClusterStatMetrics.java new file mode 100644 index 000000000000..0ee8e633cf8f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/metrics/ClusterStatMetrics.java @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.metrics; + +import org.graylog2.indexer.datastream.policy.actions.RollupAction; + +import java.util.Locale; + +public enum ClusterStatMetrics { + DOC_COUNT("long", new RollupAction.IsmRollup.AvgMetric(), "$._all.primaries.docs.count", false), + SHARDS_TOTAL("integer", new RollupAction.IsmRollup.AvgMetric(), "$._shards.total", false), + SHARDS_SUCCESSFUL("integer", new RollupAction.IsmRollup.AvgMetric(), "$._shards.successful", false), + SHARDS_FAILED("integer", new RollupAction.IsmRollup.AvgMetric(), "$._shards.failed", false), + SEARCH_LATENCY("integer", new RollupAction.IsmRollup.AvgMetric(), "$._all.total.search.query_time_in_millis", true), + INDEX_LATENCY("integer", new RollupAction.IsmRollup.AvgMetric(), "$._all.total.indexing.index_time_in_millis", true), + SEARCH_OPS("long", new RollupAction.IsmRollup.AvgMetric(), "$._all.total.search.query_total", true), + INDEX_OPS("long", new RollupAction.IsmRollup.AvgMetric(), "$._all.total.indexing.index_total", true), + + ; + private final String mappingType; + private final RollupAction.IsmRollup.AggregationMetric aggregationMetric; + private final String clusterStat; + private final boolean rateMetric; + private static final String RATE_SUFFIX = "_rate"; + + + ClusterStatMetrics(String mappingType, RollupAction.IsmRollup.AggregationMetric aggregationMetric, String clusterStat, boolean rateMetric) { + this.mappingType = mappingType; + this.aggregationMetric = aggregationMetric; + this.clusterStat = clusterStat; + this.rateMetric = rateMetric; + } + + public String getMappingType() { + return mappingType; + } + + public RollupAction.IsmRollup.AggregationMetric getAggregationMetric() { + return aggregationMetric; + } + + public String getFieldName() { + return name().toLowerCase(Locale.ROOT); + } + + public String getRateFieldName() { + if (!isRateMetric()) { + throw new RuntimeException("Metric is not a rate metric"); + } + return getFieldName() + RATE_SUFFIX; + } + + public String getClusterStat() { + return clusterStat; + } + + public boolean isRateMetric() { + return rateMetric; + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/metrics/ClusterStatMetricsCollector.java b/data-node/src/main/java/org/graylog/datanode/metrics/ClusterStatMetricsCollector.java new file mode 100644 index 000000000000..1779ee1b08f4 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/metrics/ClusterStatMetricsCollector.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.metrics; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.JsonPath; +import org.graylog.shaded.opensearch2.org.opensearch.client.Request; +import org.graylog.shaded.opensearch2.org.opensearch.client.Response; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +public class ClusterStatMetricsCollector { + + Logger log = LoggerFactory.getLogger(ClusterStatMetricsCollector.class); + + private final RestHighLevelClient client; + private final ObjectMapper objectMapper; + + public ClusterStatMetricsCollector(RestHighLevelClient client, ObjectMapper objectMapper) { + this.client = client; + this.objectMapper = objectMapper; + } + + public Map getClusterMetrics(Map previousMetrics) { + Request clusterStatRequest = new Request("GET", "_stats"); + try { + Response response = client.getLowLevelClient().performRequest(clusterStatRequest); + JsonNode responseNode = objectMapper.readValue(response.getEntity().getContent(), JsonNode.class); + + if (responseNode != null) { + DocumentContext statContext = JsonPath.parse(responseNode.toString()); + + Map metrics = new HashMap<>(); + + Arrays.stream(ClusterStatMetrics.values()) + .filter(m -> Objects.nonNull(m.getClusterStat())) + .forEach(metric -> { + String fieldName = metric.getFieldName(); + try { + Object value = statContext.read(metric.getClusterStat()); + if (value instanceof Number current && metric.isRateMetric() && previousMetrics.containsKey(fieldName)) { + Number previous = (Number) previousMetrics.get(fieldName); + long rate = current.longValue() - previous.longValue(); + if (rate > 0) { + metrics.put(metric.getRateFieldName(), rate); + } + } + metrics.put(fieldName, value); + } catch (Exception e) { + log.error("Could not retrieve cluster metric {}", fieldName); + } + }); + + return metrics; + } + + throw new IOException("No cluster stats returned"); + } catch (IOException e) { + log.error("Error retrieving cluster stats", e); + } + return Map.of(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/metrics/ConfigureMetricsIndexSettings.java b/data-node/src/main/java/org/graylog/datanode/metrics/ConfigureMetricsIndexSettings.java new file mode 100644 index 000000000000..c8cecb118985 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/metrics/ConfigureMetricsIndexSettings.java @@ -0,0 +1,180 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.metrics; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracer; +import org.graylog.datanode.periodicals.MetricsCollector; +import org.graylog.storage.opensearch2.DataStreamAdapterOS2; +import org.graylog.storage.opensearch2.ism.IsmApi; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.NodeService; +import org.graylog2.indexer.datastream.DataStreamService; +import org.graylog2.indexer.datastream.DataStreamServiceImpl; +import org.graylog2.indexer.datastream.policy.IsmPolicy; +import org.graylog2.indexer.datastream.policy.Policy; +import org.graylog2.indexer.datastream.policy.actions.Action; +import org.graylog2.indexer.datastream.policy.actions.DeleteAction; +import org.graylog2.indexer.datastream.policy.actions.RolloverAction; +import org.graylog2.indexer.datastream.policy.actions.RollupAction; +import org.graylog2.indexer.datastream.policy.actions.TimesUnit; +import org.graylog2.indexer.fieldtypes.IndexFieldTypesService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class ConfigureMetricsIndexSettings implements StateMachineTracer { + + private final Logger log = LoggerFactory.getLogger(ConfigureMetricsIndexSettings.class); + + private final OpensearchProcess process; + private final Configuration configuration; + private final IndexFieldTypesService indexFieldTypesService; + private final ObjectMapper objectMapper; + private DataStreamService dataStreamService; + private final NodeService nodeService; + + @Inject + public ConfigureMetricsIndexSettings(OpensearchProcess process, Configuration configuration, IndexFieldTypesService indexFieldTypesService, ObjectMapper objectMapper, NodeService nodeService) { + this.process = process; + this.configuration = configuration; + this.objectMapper = objectMapper; + this.indexFieldTypesService = indexFieldTypesService; + this.nodeService = nodeService; + } + + @Override + public void trigger(OpensearchEvent trigger) { + } + + @Override + public void transition(OpensearchEvent trigger, OpensearchState source, OpensearchState destination) { + if (destination == OpensearchState.AVAILABLE && source == OpensearchState.STARTING && process.isManagerNode()) { + process.openSearchClient().ifPresent(client -> { + final IsmApi ismApi = new IsmApi(client, objectMapper); + int replicas = nodeService.allActive().size() == 1 ? 
0 : 1; + dataStreamService = new DataStreamServiceImpl( + new DataStreamAdapterOS2(client, objectMapper, ismApi), + indexFieldTypesService, + replicas + ); + dataStreamService.createDataStream(configuration.getMetricsStream(), + configuration.getMetricsTimestamp(), + createMappings(), + createPolicy(configuration)); + }); + } + } + + private Map<String, Map<String, String>> createMappings() { + Map<String, Map<String, String>> mappings = new HashMap<>(); + mappings.put("node", ImmutableMap.of("type", "keyword")); + + mappings.putAll(MetricsCollector.getDatanodeMetrics()); + + mappings.putAll( + Arrays.stream(NodeStatMetrics.values()).collect(Collectors.toMap( + NodeStatMetrics::getFieldName, metric -> ImmutableMap.of("type", metric.getMappingType()) + )) + ); + mappings.putAll( + Arrays.stream(ClusterStatMetrics.values()).collect(Collectors.toMap( + ClusterStatMetrics::getFieldName, metric -> ImmutableMap.of("type", metric.getMappingType()) + )) + ); + mappings.putAll( + Arrays.stream(ClusterStatMetrics.values()).filter(ClusterStatMetrics::isRateMetric).collect(Collectors.toMap( + ClusterStatMetrics::getRateFieldName, metric -> ImmutableMap.of("type", metric.getMappingType()) + )) + ); + return mappings; + } + + private IsmPolicy createPolicy(Configuration configuration) { + // states are defined from last to first so each state can reference the name of the following state + Policy.State stateDelete = ismDeleteState(); + Policy.State stateRollup = ismRollupState(stateDelete.name(), configuration); + Policy.State stateOpen = ismOpenState(stateRollup.name()); + + Policy policy = new Policy(null, + "Manages rollover, rollup and deletion of data node metrics indices", + null, + stateOpen.name(), ImmutableList.of(stateOpen, stateRollup, stateDelete), + null); + + try { + log.debug("Creating ISM configuration for metrics data stream {}", + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(policy)); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + + return new IsmPolicy(configuration.getMetricsPolicy(), policy); + } + + private Policy.State ismDeleteState() { + final List<Action> actions = ImmutableList.of(new Action(new DeleteAction())); + final List<Policy.Transition> transitions = ImmutableList.of(); + return new Policy.State("delete", actions, transitions); + } + + private Policy.State ismRollupState(String nextState, Configuration configuration) { + List<RollupAction.IsmRollup.Metric> rollupFields = Arrays.stream(NodeStatMetrics.values()) + .map(metric -> new RollupAction.IsmRollup.Metric(metric.getFieldName(), ImmutableList.of(metric.getAggregationMetric()))) + .collect(Collectors.toList()); + rollupFields.addAll(Arrays.stream(ClusterStatMetrics.values()) + .map(metric -> new RollupAction.IsmRollup.Metric(metric.getFieldName(), ImmutableList.of(metric.getAggregationMetric()))) + .toList()); + + final RollupAction.IsmRollup ismRollup = new RollupAction.IsmRollup( + configuration.getMetricsDailyIndex(), + "Rollup Data Stream Index", + 1000, + ImmutableList.of( + new RollupAction.IsmRollup.DateHistogram( + configuration.getMetricsTimestamp(), + "60m", "UTC") + ), + rollupFields + ); + RollupAction rollupAction = new RollupAction(ismRollup); + final List<Action> actions = ImmutableList.of(new Action(rollupAction)); + final List<Policy.Transition> transitions = ImmutableList.of(new Policy.Transition(nextState, + new Policy.Condition(TimesUnit.DAYS.format(configuration.getMetricsRetention().toDays())))); + return new Policy.State("rollup", actions, transitions); + } + + private Policy.State ismOpenState(String nextState) { + final List<Action> actions = ImmutableList.of(new Action(new RolloverAction("1d", null))); + final List<Policy.Transition>
transitions = ImmutableList.of(new Policy.Transition(nextState, null)); + return new Policy.State("open", actions, transitions); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/metrics/NodeMetricsCollector.java b/data-node/src/main/java/org/graylog/datanode/metrics/NodeMetricsCollector.java new file mode 100644 index 000000000000..1cd1356662a0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/metrics/NodeMetricsCollector.java @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.metrics; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.jayway.jsonpath.Criteria; +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.Filter; +import com.jayway.jsonpath.JsonPath; +import org.graylog.shaded.opensearch2.org.opensearch.client.Request; +import org.graylog.shaded.opensearch2.org.opensearch.client.Response; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +public class NodeMetricsCollector { + + Logger log = LoggerFactory.getLogger(NodeMetricsCollector.class); + + private final RestHighLevelClient client; + private final ObjectMapper objectMapper; + + public NodeMetricsCollector(RestHighLevelClient client, ObjectMapper objectMapper) { + this.client = client; + this.objectMapper = objectMapper; + } + + public Map getNodeMetrics(String node) { + Map metrics = new HashMap<>(); + + Request nodeStatRequest = new Request("GET", "_nodes/" + node + "/stats"); + final DocumentContext nodeContext = getNodeContextFromRequest(node, nodeStatRequest); + + if (Objects.nonNull(nodeContext)) { + Arrays.stream(NodeStatMetrics.values()) + .filter(m -> Objects.nonNull(m.getNodeStat())) + .forEach(metric -> { + try { + metrics.put(metric.getFieldName(), metric.mapValue(nodeContext.read(metric.getNodeStat()))); + } catch (Exception e) { + log.error("Could not retrieve metric {} for node {}", metric.getFieldName(), node); + } + }); + } + + return metrics; + } + + private DocumentContext getNodeContextFromRequest(String node, Request nodeStatRequest) { + try { + Response response = client.getLowLevelClient().performRequest(nodeStatRequest); + Filter nodeFilter = Filter.filter(Criteria.where("name").eq(node)); + Object nodeStatNode = JsonPath.read(response.getEntity().getContent(), "$['nodes'][*][?]", nodeFilter); + + if (nodeStatNode != null) { + JsonNode nodeStats = objectMapper.convertValue(nodeStatNode, JsonNode.class); + return JsonPath.parse(nodeStats.get(0).toString()); + } + throw new IOException("No node stats returned for node"); + } catch (IOException e) { + log.error("Error retrieving node stats for node {}", node, e); + } + return null; + } + +} diff --git 
a/data-node/src/main/java/org/graylog/datanode/metrics/NodeStatMetrics.java b/data-node/src/main/java/org/graylog/datanode/metrics/NodeStatMetrics.java new file mode 100644 index 000000000000..4abc6d3f65e0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/metrics/NodeStatMetrics.java @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.metrics; + +import org.graylog2.indexer.datastream.policy.actions.RollupAction; + +import java.util.Locale; +import java.util.Objects; +import java.util.function.Function; + +public enum NodeStatMetrics { + CPU_LOAD("float", new RollupAction.IsmRollup.AvgMetric(), "$.os.cpu.load_average.1m"), + MEM_FREE("float", new RollupAction.IsmRollup.AvgMetric(), "$.os.mem.free_in_bytes", NodeStatMetrics::bytesToMb), + MEM_TOTAL_USED("integer", new RollupAction.IsmRollup.AvgMetric(), "$.os.mem.used_percent"), + MEM_HEAP_USED("integer", new RollupAction.IsmRollup.AvgMetric(), "$.jvm.mem.heap_used_percent"), + DISK_FREE("float", new RollupAction.IsmRollup.AvgMetric(), "$.fs.total.available_in_bytes", NodeStatMetrics::bytesToGb), + THREAD_POOL_WRITE_THREADS("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.write.threads"), + THREAD_POOL_WRITE_QUEUE("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.write.queue"), + THREAD_POOL_WRITE_REJECTED("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.write.rejected"), + THREAD_POOL_SEARCH_THREADS("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.search.threads"), + THREAD_POOL_SEARCH_QUEUE("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.search.queue"), + THREAD_POOL_SEARCH_REJECTED("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.search.rejected"), + THREAD_POOL_MERGE_THREADS("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.force_merge.threads"), + THREAD_POOL_MERGE_QUEUE("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.force_merge.queue"), + THREAD_POOL_MERGE_REJECTED("integer", new RollupAction.IsmRollup.AvgMetric(), "$.thread_pool.force_merge.rejected"), + ; + + private static float bytesToMb(Object v) { + var number = (v instanceof Long) ? (long) v : (int) v; + return number / (float) (1024 * 1024); + } + + private static float bytesToGb(Object v) { + var number = (v instanceof Long) ? 
(long) v : (int) v; + return number / (float) (1024 * 1024 * 1024); + } + + private final String mappingType; + private final RollupAction.IsmRollup.AggregationMetric aggregationMetric; + private final String nodeStat; + private final Function mappingFunction; + + NodeStatMetrics(String mappingType, RollupAction.IsmRollup.AggregationMetric aggregationMetric, String nodeStat) { + this(mappingType, aggregationMetric, nodeStat, null); + } + + NodeStatMetrics(String mappingType, RollupAction.IsmRollup.AggregationMetric aggregationMetric, String nodeStat, Function mappingFunction) { + this.mappingType = mappingType; + this.aggregationMetric = aggregationMetric; + this.nodeStat = nodeStat; + this.mappingFunction = mappingFunction; + } + + public String getMappingType() { + return mappingType; + } + + public RollupAction.IsmRollup.AggregationMetric getAggregationMetric() { + return aggregationMetric; + } + + public String getFieldName() { + return name().toLowerCase(Locale.ROOT); + } + + public String getNodeStat() { + return nodeStat; + } + + public Object mapValue(Object value) { + if (Objects.isNull(mappingFunction)) { + return value; + } + return mappingFunction.apply(value); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/CsrRequester.java b/data-node/src/main/java/org/graylog/datanode/opensearch/CsrRequester.java new file mode 100644 index 000000000000..c2b325a6bcdf --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/CsrRequester.java @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch; + +public interface CsrRequester { + void triggerCertificateSigningRequest(); +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/CsrRequesterImpl.java b/data-node/src/main/java/org/graylog/datanode/opensearch/CsrRequesterImpl.java new file mode 100644 index 000000000000..2dc639ad04e2 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/CsrRequesterImpl.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch; + +import com.google.common.collect.ImmutableList; +import jakarta.inject.Inject; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.DatanodeKeystoreException; +import org.graylog.security.certutil.csr.exceptions.CSRGenerationException; +import org.graylog2.cluster.NodeNotFoundException; +import org.graylog2.cluster.certificates.CertificateExchange; +import org.graylog2.cluster.certificates.CertificateSigningRequest; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.NodeService; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.shared.SuppressForbidden; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.InetAddress; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class CsrRequesterImpl implements CsrRequester { + + private static final Logger LOG = LoggerFactory.getLogger(CsrRequesterImpl.class); + + private final NodeId nodeId; + + private final DatanodeKeystore datanodeKeystore; + + private final CertificateExchange certificateExchange; + private final String hostname; + + @Inject + public CsrRequesterImpl(Configuration datanodeConfiguration, NodeId nodeId, DatanodeKeystore datanodeKeystore, CertificateExchange certificateExchange) { + this.hostname = datanodeConfiguration.getHostname(); + this.nodeId = nodeId; + this.datanodeKeystore = datanodeKeystore; + this.certificateExchange = certificateExchange; + } + + public void triggerCertificateSigningRequest() { + try { + final var altNames = ImmutableList.builder() + .addAll(determineAltNames()) + .build(); + final PKCS10CertificationRequest csr = datanodeKeystore.createCertificateSigningRequest(hostname, altNames); + certificateExchange.requestCertificate(new CertificateSigningRequest(nodeId.getNodeId(), csr)); + LOG.info("Triggered certificate signing request for this datanode"); + } catch (CSRGenerationException | IOException | DatanodeKeystoreException ex) { + throw new RuntimeException(ex); + } + } + + + private Iterable determineAltNames() { + return Stream.of("127.0.0.1", "::1") + .map(this::reverseLookup) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + } + + @SuppressForbidden("Deliberate use of InetAddress#getHostName") + private String reverseLookup(String ipAddress) { + try { + final var inetAddress = InetAddress.getByName(ipAddress); + final var reverseLookup = inetAddress.getHostName(); + return reverseLookup.equals(ipAddress) ? null : reverseLookup; + } catch (Exception e) { + return null; + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchConfigurationChangeEvent.java b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchConfigurationChangeEvent.java new file mode 100644 index 000000000000..deb5886366e4 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchConfigurationChangeEvent.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch; + +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; + +public record OpensearchConfigurationChangeEvent(OpensearchConfiguration config) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchInfo.java b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchInfo.java new file mode 100644 index 000000000000..e9fdf14df948 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchInfo.java @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch; + +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.process.ProcessInformation; + +public record OpensearchInfo(String nodeName, OpensearchState state, String restBaseUrl, + ProcessInformation process) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcess.java b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcess.java new file mode 100644 index 000000000000..d86157ab6772 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcess.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch; + +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.process.ManagableProcess; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog.storage.opensearch2.OpenSearchClient; + +import java.net.URI; +import java.util.List; +import java.util.Optional; + +public interface OpensearchProcess extends ManagableProcess { + + OpensearchInfo processInfo(); + + Optional restClient(); + + Optional openSearchClient(); + List stdOutLogs(); + List stdErrLogs(); + + URI getOpensearchBaseUrl(); + String getOpensearchClusterUrl(); + String getDatanodeRestApiUrl(); + + List getOpensearchRoles(); + + void remove(); + + void reset(); + + void available(); + boolean isManagerNode(); +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcessImpl.java b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcessImpl.java new file mode 100644 index 000000000000..1b1207707267 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcessImpl.java @@ -0,0 +1,392 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.eventbus.EventBus; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import jakarta.inject.Inject; +import org.apache.commons.collections4.queue.CircularFifoQueue; +import org.apache.commons.exec.ExecuteException; +import org.apache.http.client.utils.URIBuilder; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.opensearch.cli.OpensearchCommandLineProcess; +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.datanode.opensearch.rest.OpensearchRestClient; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; +import org.graylog.datanode.periodicals.ClusterStateResponse; +import org.graylog.datanode.process.ProcessInformation; +import org.graylog.datanode.process.ProcessListener; +import org.graylog.shaded.opensearch2.org.opensearch.OpenSearchStatusException; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.health.ClusterHealthRequest; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.health.ClusterHealthResponse; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.settings.ClusterGetSettingsRequest; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.settings.ClusterGetSettingsResponse; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; +import org.graylog.shaded.opensearch2.org.opensearch.client.ClusterClient; +import org.graylog.shaded.opensearch2.org.opensearch.client.Request; +import org.graylog.shaded.opensearch2.org.opensearch.client.RequestOptions; +import org.graylog.shaded.opensearch2.org.opensearch.client.Response; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog.shaded.opensearch2.org.opensearch.common.settings.Settings; +import org.graylog.storage.opensearch2.OpenSearchClient; +import org.graylog2.datanode.DataNodeLifecycleEvent; +import org.graylog2.datanode.DataNodeLifecycleTrigger; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.security.CustomCAX509TrustManager; +import org.graylog2.security.TrustManagerAggregator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nonnull; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import java.io.IOException; +import java.net.URI; +import java.security.KeyStore; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; +import java.util.Queue; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +public class OpensearchProcessImpl implements OpensearchProcess, ProcessListener { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchProcessImpl.class); + + + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private Optional opensearchConfiguration = Optional.empty(); + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private Optional restClient 
= Optional.empty(); + private Optional openSearchClient = Optional.empty(); + + private final OpensearchStateMachine processState; + + private final DatanodeConfiguration datanodeConfiguration; + + private OpensearchCommandLineProcess commandLineProcess; + + private final Queue stdout; + private final Queue stderr; + private final CustomCAX509TrustManager trustManager; + private final Configuration configuration; + private final ObjectMapper objectMapper; + private final String nodeName; + private final NodeId nodeId; + private final EventBus eventBus; + + + static final String CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING = "cluster.routing.allocation.exclude._name"; + boolean allocationExcludeChecked = false; + ScheduledExecutorService executorService; + + @Inject + OpensearchProcessImpl(DatanodeConfiguration datanodeConfiguration, final CustomCAX509TrustManager trustManager, + final Configuration configuration, + ObjectMapper objectMapper, OpensearchStateMachine processState, NodeId nodeId, EventBus eventBus) { + this.datanodeConfiguration = datanodeConfiguration; + this.processState = processState; + this.stdout = new CircularFifoQueue<>(datanodeConfiguration.processLogsBufferSize()); + this.stderr = new CircularFifoQueue<>(datanodeConfiguration.processLogsBufferSize()); + this.trustManager = trustManager; + this.configuration = configuration; + this.objectMapper = objectMapper; + this.nodeName = configuration.getDatanodeNodeName(); + this.nodeId = nodeId; + this.eventBus = eventBus; + } + + private RestHighLevelClient createRestClient(OpensearchConfiguration configuration) { + + final TrustManager trustManager = createAggregatedTrustManager(configuration.trustStore()); + + return OpensearchRestClient.build(configuration, datanodeConfiguration, trustManager); + } + + /** + * We have to combine the system-wide trust manager with a manager that trusts certificates used to secure + * the datanode's opensearch process. + * + * @param truststore truststore containing certificates used to secure datanode's opensearch + * @return combined trust manager + */ + @Nonnull + private X509TrustManager createAggregatedTrustManager(KeyStore truststore) { + return new TrustManagerAggregator(List.of(this.trustManager, TrustManagerAggregator.trustManagerFromKeystore(truststore))); + } + + @Override + public List stdOutLogs() { + return stdout.stream().toList(); + } + + @Override + public List stdErrLogs() { + return stderr.stream().toList(); + } + + public Optional restClient() { + return restClient; + } + + public Optional openSearchClient() { + return openSearchClient; + } + + public OpensearchInfo processInfo() { + return new OpensearchInfo(configuration.getDatanodeNodeName(), processState.getState(), getOpensearchBaseUrl().toString(), commandLineProcess != null ? commandLineProcess.processInfo() : ProcessInformation.empty()); + } + + @Override + public URI getOpensearchBaseUrl() { + final String baseUrl = opensearchConfiguration.map(OpensearchConfiguration::getRestBaseUrl) + .map(httpHost -> new URIBuilder() + .setHost(httpHost.getHostName()) + .setPort(httpHost.getPort()) + .setScheme(httpHost.getSchemeName()).toString()) + .orElse(""); // Empty address will cause problems for opensearch clients. 
Has to be filtered out in IndexerDiscoveryProvider + return URI.create(baseUrl); + } + + @Override + public String getOpensearchClusterUrl() { + return configuration.getDatanodeNodeName() + ":" + configuration.getOpensearchTransportPort(); + } + + @Override + public String getDatanodeRestApiUrl() { + final boolean secured = opensearchConfiguration.flatMap(OpensearchConfiguration::httpCertificate).isPresent(); + String protocol = secured ? "https" : "http"; + String host = configuration.getHostname(); + final int port = configuration.getDatanodeHttpPort(); + return String.format(Locale.ROOT, "%s://%s:%d", protocol, host, port); + } + + @Override + public List getOpensearchRoles() { + return opensearchConfiguration.map(OpensearchConfiguration::opensearchRoles).orElse(List.of()); + } + + public void onEvent(OpensearchEvent event) { + LOG.debug("Process event: " + event); + this.processState.fire(event); + } + + public boolean isInState(OpensearchState expectedState) { + return this.processState.getState().equals(expectedState); + } + + @Override + public void configure(OpensearchConfiguration configuration) { + this.opensearchConfiguration = Optional.of(configuration); + configure(); + } + + private void configure() { + opensearchConfiguration.ifPresentOrElse( + (config -> { + // refresh TM if the SSL certs changed + trustManager.refresh(); + }), + () -> {throw new IllegalArgumentException("Opensearch configuration required but not supplied!");} + ); + } + + @Override + public synchronized void start() { + opensearchConfiguration.ifPresentOrElse( + (config -> { + boolean startedPreviously = Objects.nonNull(commandLineProcess) && commandLineProcess.processInfo().alive(); + if (startedPreviously) { + stop(); + } + + commandLineProcess = new OpensearchCommandLineProcess(config, this); + commandLineProcess.start(); + + restClient = Optional.of(createRestClient(config)); + openSearchClient = restClient.map(c -> new OpenSearchClient(c, objectMapper)); + }), + () -> {throw new IllegalArgumentException("Opensearch configuration required but not supplied!");} + ); + } + + /** + * reset allocation exclude status on restart to allow removed nodes to rejoin the cluster + */ + private void checkAllocationEnabledStatus() { + if (restClient().isPresent()) { + ClusterClient clusterClient = restClient().get().cluster(); + try { + final ClusterGetSettingsResponse settings = + clusterClient.getSettings(new ClusterGetSettingsRequest(), RequestOptions.DEFAULT); + final String setting = settings.getSetting(CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING); + if (nodeName.equals(setting)) { + ClusterUpdateSettingsRequest updateSettings = new ClusterUpdateSettingsRequest(); + updateSettings.transientSettings(Settings.builder() + .putNull(CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING) + .build()); + clusterClient.putSettings(updateSettings, RequestOptions.DEFAULT); + } + allocationExcludeChecked = true; + } catch (IOException e) { + throw new RuntimeException("Error getting cluster settings from OpenSearch", e); + } + } + } + + + @Override + public synchronized void stop() { + stopProcess(); + stopRestClient(); + } + + private void stopRestClient() { + restClient().ifPresent(client -> { + try { + client.close(); + } catch (IOException e) { + LOG.warn("Failed to close rest client", e); + } + }); + } + + private void stopProcess() { + if (this.commandLineProcess != null) { + commandLineProcess.close(); + } + } + + @Override + public void remove() { + LOG.info("Starting removal of OpenSearch node"); + 
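// Removal runs in two steps: exclude this node from shard allocation via a transient cluster setting, then + // checkRemovalStatus() polls cluster health every 10 seconds and signals REMOVED once no shards are relocating. +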
restClient().ifPresent(client -> { + final ClusterClient clusterClient = client.cluster(); + ClusterUpdateSettingsRequest settings = new ClusterUpdateSettingsRequest(); + settings.transientSettings(Settings.builder() + .put(CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING, nodeName) + .build()); + try { + final ClusterUpdateSettingsResponse response = + clusterClient.putSettings(settings, RequestOptions.DEFAULT); + if (response.isAcknowledged()) { + allocationExcludeChecked = false; // reset to rejoin cluster in case of failure + executorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactoryBuilder().setNameFormat("datanode-removal").build()); + executorService.scheduleAtFixedRate(this::checkRemovalStatus, 10, 10, TimeUnit.SECONDS); + } else { + throw new RuntimeException("Failed to exclude node from cluster allocation"); + } + } catch (IOException e) { + throw new RuntimeException("Failed to exclude node from cluster allocation", e); + } + }); + } + + /** + * started by onRemove() to check if all shards have been relocated + */ + void checkRemovalStatus() { + final Optional restClient = restClient(); + if (restClient.isPresent()) { + try { + final ClusterClient clusterClient = restClient.get().cluster(); + final ClusterHealthResponse health = clusterClient + .health(new ClusterHealthRequest(), RequestOptions.DEFAULT); + if (health.getRelocatingShards() == 0) { + onEvent(OpensearchEvent.PROCESS_STOPPED); + executorService.shutdown(); + eventBus.post(DataNodeLifecycleEvent.create(nodeId.getNodeId(), DataNodeLifecycleTrigger.REMOVED)); + } + } catch (IOException | OpenSearchStatusException e) { + throw new RuntimeException("Error checking removal status", e); + } + } + } + + @Override + public void reset() { + stop(); + configure(); + start(); + } + + @Override + public void onStart() { + } + + @Override + public void onStdOut(String line) { + LOG.info(line); + stdout.offer(line); + } + + @Override + public void onStdErr(String line) { + LOG.warn(line); + stderr.offer(line); + } + + @Override + public void onProcessComplete(int exitValue) { + LOG.info("Opensearch process completed with exit code {}", exitValue); + onEvent(OpensearchEvent.PROCESS_TERMINATED); + } + + @Override + public void onProcessFailed(ExecuteException e) { + LOG.warn("Opensearch process failed", e); + onEvent(OpensearchEvent.PROCESS_TERMINATED); + } + + + @Override + public void available() { + if (!allocationExcludeChecked) { + this.checkAllocationEnabledStatus(); + } + } + + @Override + public boolean isManagerNode() { + return restClient() + .flatMap(this::requestClusterState) + .map(r -> r.nodes().get(r.clusterManagerNode())) + .map(managerNode -> configuration.getDatanodeNodeName().equals(managerNode.name())) + .orElse(false); + } + + private Optional requestClusterState(RestHighLevelClient client) { + try { + final Response response = client.getLowLevelClient().performRequest(new Request("GET", "_cluster/state/")); + final ClusterStateResponse state = objectMapper.readValue(response.getEntity().getContent(), ClusterStateResponse.class); + return Optional.of(state); + } catch (IOException e) { + LOG.warn("Failed to obtain cluster state response", e); + return Optional.empty(); + } + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcessService.java b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcessService.java new file mode 100644 index 000000000000..fb01fe100ea6 --- /dev/null +++ 
b/data-node/src/main/java/org/graylog/datanode/opensearch/OpensearchProcessService.java @@ -0,0 +1,174 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch; + +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import com.google.common.util.concurrent.AbstractIdleService; +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.bootstrap.preflight.DatanodeDirectoriesLockfileCheck; +import org.graylog.datanode.configuration.OpensearchConfigurationService; +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; +import org.graylog2.bootstrap.preflight.PreflightConfigResult; +import org.graylog2.bootstrap.preflight.PreflightConfigService; +import org.graylog2.datanode.DataNodeLifecycleEvent; +import org.graylog2.datanode.RemoteReindexAllowlistEvent; +import org.graylog2.plugin.system.NodeId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Singleton +public class OpensearchProcessService extends AbstractIdleService implements Provider { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchProcessService.class); + + private final OpensearchProcess process; + private final OpensearchConfigurationService configurationProvider; + private final NodeId nodeId; + private final DatanodeDirectoriesLockfileCheck lockfileCheck; + private final PreflightConfigService preflightConfigService; + private final Configuration configuration; + + private final OpensearchStateMachine stateMachine; + private final CsrRequester csrRequester; + private boolean processAutostart = true; + + + @Inject + public OpensearchProcessService( + final OpensearchConfigurationService configurationProvider, + final EventBus eventBus, + final Configuration configuration, + final NodeId nodeId, + final DatanodeDirectoriesLockfileCheck lockfileCheck, + final PreflightConfigService preflightConfigService, + final OpensearchProcess process, CsrRequester csrRequester, OpensearchStateMachine stateMachine) { + this.configurationProvider = configurationProvider; + this.configuration = configuration; + this.nodeId = nodeId; + this.lockfileCheck = lockfileCheck; + this.preflightConfigService = preflightConfigService; + this.process = process; + this.csrRequester = csrRequester; + this.stateMachine = stateMachine; + eventBus.register(this); + } + + @Subscribe + @SuppressWarnings("unused") + public void handleRemoteReindexAllowlistEvent(RemoteReindexAllowlistEvent event) { + switch (event.action()) { + case ADD -> this.configurationProvider.setAllowlist(event.allowlist(), event.trustedCertificates()); + case REMOVE -> 
this.configurationProvider.removeAllowlist(); + } + } + + @Subscribe + @SuppressWarnings("unused") + public void handleNodeLifecycleEvent(DataNodeLifecycleEvent event) { + if (nodeId.getNodeId().equals(event.nodeId())) { + switch (event.trigger()) { + case REMOVE -> stateMachine.fire(OpensearchEvent.PROCESS_REMOVE); + case RESET -> stateMachine.fire(OpensearchEvent.RESET); + case STOP -> this.shutDown(); + case START -> stateMachine.fire(OpensearchEvent.PROCESS_STARTED); + case REQUEST_CSR -> { + this.processAutostart = false; + csrRequester.triggerCertificateSigningRequest(); + } + case REQUEST_CSR_WITH_AUTOSTART -> { + this.processAutostart = true; + csrRequester.triggerCertificateSigningRequest(); + } + } + } + } + + private void checkWritePreflightFinishedOnInsecureStartup() { + if (configuration.isInsecureStartup()) { + var preflight = preflightConfigService.getPreflightConfigResult(); + if (preflight == null || !preflight.equals(PreflightConfigResult.FINISHED)) { + preflightConfigService.setConfigResult(PreflightConfigResult.FINISHED); + } + } + } + + /** + * triggered when starting the service + */ + @Override + protected void startUp() { + + } + + @Subscribe + public void onConfigurationChangeEvent(OpensearchConfigurationChangeEvent event) { + onConfigurationChange(event.config()); + } + + private void onConfigurationChange(OpensearchConfiguration config) { + configure(config); + if (config.securityConfigured()) { + LOG.info("OpenSearch starting up"); + checkWritePreflightFinishedOnInsecureStartup(); + try { + lockfileCheck.checkDatanodeLock(config.getDatanodeDirectories().getDataTargetDir()); + if (stateMachine.isInState(OpensearchState.WAITING_FOR_CONFIGURATION) && !this.processAutostart) { + stateMachine.fire(OpensearchEvent.PROCESS_PREPARED); + this.processAutostart = true; // reset to default + } else { + stateMachine.fire(OpensearchEvent.PROCESS_STARTED); + } + } catch (Exception e) { + LOG.error("Could not start up data node", e); + } + } + } + + private void configure(OpensearchConfiguration config) { + if (config.securityConfigured()) { + this.process.configure(config); + } else { + String noConfigMessage = """ + \n + ======================================================================================================== + It seems you are starting Data node for the first time. The current configuration is not sufficient to + start the indexer process because a security configuration is missing. You have to either provide http + and transport SSL certificates or use the Graylog preflight interface to configure this Data node remotely. + ======================================================================================================== + """; + LOG.info(noConfigMessage); + } + } + + + @Override + protected void shutDown() { + stateMachine.fire(OpensearchEvent.PROCESS_STOPPED); + } + + @Override + public OpensearchProcess get() { + return process; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/cli/AbstractOpensearchCli.java b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/AbstractOpensearchCli.java new file mode 100644 index 000000000000..4f2b87195446 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/AbstractOpensearchCli.java @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.cli; + +import org.apache.commons.exec.CommandLine; +import org.apache.commons.exec.DefaultExecuteResultHandler; +import org.apache.commons.exec.DefaultExecutor; +import org.apache.commons.exec.PumpStreamHandler; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public abstract class AbstractOpensearchCli { + + private final Path configPath; + private final Path binPath; + + /** + * @param configPath Opensearch CLI tools adapt configuration stored under OPENSEARCH_PATH_CONF env property. + * This is why wealways want to set this configPath for each CLI tool. + * @param bin location of the actual executable binary that this wrapper handles + */ + private AbstractOpensearchCli(Path configPath, Path bin) { + this.configPath = configPath; + this.binPath = bin; + } + + protected AbstractOpensearchCli(Path configDir, Path binDir, String binName) { + this(configDir, checkExecutable(binDir.resolve(binName))); + } + + private static Path checkExecutable(Path path) { + if (!Files.isExecutable(path)) { + throw new IllegalArgumentException("Path " + path + " doesn't point to any known opensearch cli tool"); + } + return path; + } + + protected String runBatch(String... args) { + return runWithStdin(Collections.emptyList(), args); + } + + /** + * @param answersToPrompts Some opensearch CLI tools have to be operated in an interactive way - provide answers + * to questions the tool is asking. It's scripting unfriendly. Our way around this is to + * provide an input stream of expected responses, each delimited by \n. There is no validation + * and no logic, just the expected order of responses. + * @param args arguments of the command, in opensearch-keystore create, the create is the first argument + * @return All the STDOUT and STDERR of the process merged into one String. + */ + protected String runWithStdin(List answersToPrompts, String... 
args) { + final CommandLine cmd = new CommandLine(binPath.toFile()); + Arrays.asList(args).forEach(cmd::addArgument); + + final ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + + final PumpStreamHandler pumpStreamHandler; + if (answersToPrompts.isEmpty()) { + pumpStreamHandler = new PumpStreamHandler(stdout, stdout); + } else { + InputStream input = new ByteArrayInputStream(String.join("\n", answersToPrompts).getBytes(Charset.defaultCharset())); + pumpStreamHandler = new PumpStreamHandler(stdout, stdout, input); + } + + // TODO: add watchdog and timeout to the underlying command handling + final DefaultExecutor executor = new DefaultExecutor.Builder<>() + .setExecuteStreamHandler(pumpStreamHandler) + .get(); + + try { + final DefaultExecuteResultHandler executeResultHandler = new DefaultExecuteResultHandler(); + final Map env = Collections.singletonMap("OPENSEARCH_PATH_CONF", configPath.toAbsolutePath().toString()); + executor.execute(cmd, env, executeResultHandler); + executeResultHandler.waitFor(); + final int exitValue = executeResultHandler.getExitValue(); + if (exitValue != 0) { + throw new RuntimeException("Failed to execute opensearch cli" + binPath + "\n" + formatOutput(stdout)); + } + return stdout.toString(StandardCharsets.UTF_8).trim(); + } catch (IOException | InterruptedException e) { + throw new RuntimeException("Failed to execute opensearch cli" + binPath + "\n" + formatOutput(stdout)); + } + } + + private String formatOutput(ByteArrayOutputStream stdout) { + return "STDOUT/STDERR: " + stdout.toString(StandardCharsets.UTF_8); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchCli.java b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchCli.java new file mode 100644 index 000000000000..fbdd8cc1d693 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchCli.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.cli; + +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; + +import java.nio.file.Path; + +/** + * Collection of opensearch CLI tools. All of them need to have OPENSEARCH_PATH_CONF preconfigured, so they operate + * on the correct version of configuration. 
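+ * <p> + * Example usage (setting name illustrative): {@code new OpensearchCli(config).keystore().add("some.secure.setting", value)} + * adds a secret to the opensearch keystore before the process is started.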
+ */ +public class OpensearchCli { + + private final OpensearchKeystoreCli keystore; + + public OpensearchCli(OpensearchConfiguration config) { + this( + config.getOpensearchConfigurationDir().configurationRoot(), + config.getOpensearchDistribution().getOpensearchBinDirPath() + ); + } + + public OpensearchCli(Path configDir, Path binDir) { + this.keystore = new OpensearchKeystoreCli(configDir, binDir); + } + + public OpensearchKeystoreCli keystore() { + return keystore; + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchCommandLineProcess.java b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchCommandLineProcess.java new file mode 100644 index 000000000000..a020e3bad86c --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchCommandLineProcess.java @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.cli; + +import com.github.rholder.retry.Attempt; +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.RetryListener; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import jakarta.validation.constraints.NotNull; +import org.apache.commons.exec.OS; +import org.graylog.datanode.configuration.OpensearchConfigurationDir; +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.datanode.process.CommandLineProcess; +import org.graylog.datanode.process.CommandLineProcessListener; +import org.graylog.datanode.process.ProcessInformation; +import org.graylog.datanode.process.ProcessListener; +import org.graylog.datanode.process.configuration.files.DatanodeConfigFile; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.Closeable; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +public class OpensearchCommandLineProcess implements Closeable { + private static final Logger LOG = LoggerFactory.getLogger(OpensearchCommandLineProcess.class); + + private final CommandLineProcess commandLineProcess; + private final CommandLineProcessListener resultHandler; + + + /** + * as long as OpenSearch is not supported on macOS, we have to fix the jdk path if we want to + * start the DataNode inside IntelliJ. 
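+ * The workaround symlinks the system JDK (resolved via {@code /usr/libexec/java_home}) into {@code jdk.app/Contents/Home} + * inside the opensearch distribution directory.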
+ * + * @param config + */ + private void fixJdkOnMac(final OpensearchConfiguration config) { + final var isMacOS = OS.isFamilyMac(); + final var jdk = config.getOpensearchDistribution().directory().resolve("jdk.app"); + final var jdkNotLinked = !Files.exists(jdk); + if (isMacOS && jdkNotLinked) { + // Link System jdk into startup folder, get path: + final ProcessBuilder builder = new ProcessBuilder("/usr/libexec/java_home"); + builder.redirectErrorStream(true); + try { + final Process process = builder.start(); + final BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(), Charset.defaultCharset())); + var line = reader.readLine(); + if (line != null && Files.exists(Path.of(line))) { + final var target = Path.of(line); + final var src = Files.createDirectories(jdk.resolve("Contents")); + Files.createSymbolicLink(src.resolve("Home"), target); + } else { + LOG.error("Output of '/usr/libexec/java_home' is not the jdk: {}", line); + } + // cleanup + process.destroy(); + reader.close(); + } catch (IOException e) { + LOG.error("Could not link jdk.app on macOS: {}", e.getMessage(), e); + } + } + } + + private void writeOpenSearchConfig(final OpensearchConfiguration config) { + final OpensearchConfigurationDir confDir = config.getOpensearchConfigurationDir(); + config.configFiles().forEach(cf -> persistConfigFile(confDir, cf)); + } + + private static void persistConfigFile(OpensearchConfigurationDir confDir, DatanodeConfigFile cf) { + try { + final Path targetFile = confDir.createOpensearchProcessConfigurationFile(cf.relativePath()); + try (final FileOutputStream file = new FileOutputStream(targetFile.toFile())) { + cf.write(file); + } + } catch (IOException e) { + throw new OpensearchConfigurationException("Failed to create opensearch config file " + cf.relativePath(), e); + } + } + + public OpensearchCommandLineProcess(OpensearchConfiguration config, ProcessListener listener) { + fixJdkOnMac(config); + configureOpensearchKeystoreSecrets(config); + final Path executable = config.getOpensearchDistribution().getOpensearchExecutable(); + writeOpenSearchConfig(config); + resultHandler = new CommandLineProcessListener(listener); + commandLineProcess = new CommandLineProcess(executable, List.of(), resultHandler, config.getEnv()); + } + + private void configureOpensearchKeystoreSecrets(OpensearchConfiguration config) { + final OpensearchCli opensearchCli = new OpensearchCli(config); + LOG.info("Creating opensearch keystore"); + final String createdMessage = opensearchCli.keystore().create(); + LOG.info(createdMessage); + final Map keystoreItems = config.getKeystoreItems(); + keystoreItems.forEach((key, value) -> opensearchCli.keystore().add(key, value)); + LOG.info("Added {} keystore items", keystoreItems.size()); + } + + public void start() { + commandLineProcess.start(); + } + + @Override + public void close() { + commandLineProcess.stop(); + resultHandler.stopListening(); + waitForProcessTermination(); + } + + private void waitForProcessTermination() { + try { + RetryerBuilder.newBuilder() + .retryIfResult(Boolean.TRUE::equals) + .withWaitStrategy(WaitStrategies.fixedWait(100, TimeUnit.MILLISECONDS)) + .withStopStrategy(StopStrategies.stopAfterDelay(60, TimeUnit.SECONDS)) + .withRetryListener(new RetryListener() { + @Override + public void onRetry(Attempt attempt) { + LOG.info("Process " + commandLineProcess.processInfo().pid() + " still alive, waiting for termination. 
Retry #" + attempt.getAttemptNumber()); + } + }) + .build() + .call(() -> commandLineProcess.processInfo().alive()); + LOG.info("Process " + commandLineProcess.processInfo().pid() + " successfully terminated."); + } catch (ExecutionException | RetryException e) { + final String message = "Failed to terminate opensearch process " + commandLineProcess.processInfo().pid(); + LOG.error(message, e); + throw new RuntimeException(message, e); + } + } + + @NotNull + public ProcessInformation processInfo() { + return commandLineProcess.processInfo(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchKeystoreCli.java b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchKeystoreCli.java new file mode 100644 index 000000000000..9e5b25972b1d --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/cli/OpensearchKeystoreCli.java @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.cli; + +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class OpensearchKeystoreCli extends AbstractOpensearchCli { + + public OpensearchKeystoreCli(Path configDir, Path binDir) { + super(configDir, binDir, "opensearch-keystore"); + } + + /** + * Create a new opensearch keystore. This command expects that there is no keystore. If there is a keystore, + * it will respond YES to override existing. + * + * @return STDOUT/STDERR of the execution as one String + */ + public String create() { + return runWithStdin(Collections.singletonList("Y"), "create"); + } + + /** + * Add secrets to the store. The command is interactive, it will ask for the secret value (to avoid recording the value + * in the command line history). So we have to work around that and provide the value in STDIN. + */ + public void add(String key, String secretValue) { + runWithStdin(List.of(secretValue), "add", "-x", key); // -x allows input from stdin, bypassing the prompt + } + + public List list() { + final String rawResponse = runWithStdin(Collections.emptyList(), "list"); + final String[] items = rawResponse.split("\n"); + return Arrays.asList(items); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchConfiguration.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchConfiguration.java new file mode 100644 index 000000000000..f45430980d95 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchConfiguration.java @@ -0,0 +1,180 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.configuration; + +import com.google.common.collect.ImmutableMap; +import jakarta.annotation.Nonnull; +import org.graylog.datanode.OpensearchDistribution; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog.datanode.configuration.OpensearchConfigurationDir; +import org.graylog.datanode.process.Environment; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.graylog.datanode.process.configuration.files.DatanodeConfigFile; +import org.graylog.datanode.process.configuration.files.YamlConfigFile; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.graylog.shaded.opensearch2.org.apache.http.HttpHost; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.file.Path; +import java.security.KeyStore; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.stream.Collectors; + +public class OpensearchConfiguration { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchConfiguration.class); + + private final OpensearchDistribution opensearchDistribution; + private final String hostname; + private final int httpPort; + private final List configurationParts; + private final OpensearchConfigurationDir opensearchConfigurationDir; + private final DatanodeDirectories datanodeDirectories; + + public OpensearchConfiguration(OpensearchDistribution opensearchDistribution, DatanodeDirectories datanodeDirectories, String hostname, int httpPort, List configurationParts) { + this.opensearchDistribution = opensearchDistribution; + this.hostname = hostname; + this.httpPort = httpPort; + this.configurationParts = configurationParts; + this.datanodeDirectories = datanodeDirectories; + this.opensearchConfigurationDir = datanodeDirectories.createUniqueOpensearchProcessConfigurationDir(); + } + + @Nonnull + private String buildRolesList() { + return configurationParts.stream() + .flatMap(cfg -> cfg.nodeRoles().stream()) + .collect(Collectors.joining(",")); + } + + public Environment getEnv() { + final Environment env = new Environment(System.getenv()); + + List javaOpts = new LinkedList<>(); + + configurationParts.stream().map(DatanodeConfigurationPart::javaOpts) + .forEach(javaOpts::addAll); + + env.put("OPENSEARCH_JAVA_OPTS", String.join(" ", javaOpts)); + env.put("OPENSEARCH_PATH_CONF", opensearchConfigurationDir.configurationRoot().toString()); + return env; + } + + public HttpHost getRestBaseUrl() { + return new HttpHost(hostname, httpPort, isHttpsEnabled() ? "https" : "http"); + } + + public boolean isHttpsEnabled() { + return httpCertificate().isPresent(); + } + + /** + * Are there any {@link org.graylog.datanode.configuration.variants.OpensearchCertificatesProvider} configured? 
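+ * If none are configured, the datanode logs the preflight hint and neither configures nor starts the opensearch + * process (see OpensearchProcessService#configure).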
+ */ + public boolean securityConfigured() { + return configurationParts.stream().anyMatch(DatanodeConfigurationPart::securityConfigured); + } + + + public Map getKeystoreItems() { + final ImmutableMap.Builder builder = ImmutableMap.builder(); + configurationParts.stream() + .map(DatanodeConfigurationPart::keystoreItems) + .forEach(builder::putAll); + + return builder.build(); + } + + public KeyStore trustStore() { + return configurationParts.stream() + .map(DatanodeConfigurationPart::trustStore) + .filter(Objects::nonNull) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("This should not happen, truststore should always be present")); + } + + public Optional httpCertificate() { + return configurationParts.stream() + .map(DatanodeConfigurationPart::httpCertificate) + .filter(Objects::nonNull) + .findFirst(); + } + + public Optional transportCertificate() { + return configurationParts.stream() + .map(DatanodeConfigurationPart::transportCertificate) + .filter(Objects::nonNull) + .findFirst(); + } + + public List opensearchRoles() { + return configurationParts.stream() + .flatMap(cfg -> cfg.nodeRoles().stream()) + .collect(Collectors.toList()); + } + + public List configFiles() { + + final List configFiles = new LinkedList<>(); + + configurationParts.stream() + .flatMap(cp -> cp.configFiles().stream()) + .forEach(configFiles::add); + + configFiles.add(new YamlConfigFile(Path.of("opensearch.yml"), opensearchYmlConfig())); + + return configFiles; + } + + private Map opensearchYmlConfig() { + Map config = new LinkedHashMap<>(); + + // this needs special treatment as it's as an aggregation of other configuration parts + config.put("node.roles", buildRolesList()); + + configurationParts.stream() + .map(DatanodeConfigurationPart::properties) + .forEach(config::putAll); + + // now copy all the environment values to the configuration arguments. Opensearch won't do it for us, + // because we are using tar distriburion and opensearch does this only for docker dist. See opensearch-env script + // additionally, the env variables have to be prefixed with opensearch. (e.g. "opensearch.cluster.routing.allocation.disk.threshold_enabled") + getEnv().getEnv().entrySet().stream() + .filter(entry -> entry.getKey().matches("^opensearch\\.[a-z0-9_]+(?:\\.[a-z0-9_]+)+")) + .peek(entry -> LOG.info("Detected pass-through opensearch property {}:{}", entry.getKey().substring("opensearch.".length()), entry.getValue())) + .forEach(entry -> config.put(entry.getKey().substring("opensearch.".length()), entry.getValue())); + return config; + } + + public OpensearchDistribution getOpensearchDistribution() { + return opensearchDistribution; + } + + public OpensearchConfigurationDir getOpensearchConfigurationDir() { + return opensearchConfigurationDir; + } + + public DatanodeDirectories getDatanodeDirectories() { + return datanodeDirectories; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchConfigurationParams.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchConfigurationParams.java new file mode 100644 index 000000000000..675020249399 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchConfigurationParams.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.configuration; + + +import org.graylog.datanode.process.configuration.beans.ConfigurationBuildParams; + +import java.security.cert.X509Certificate; +import java.util.List; + +public record OpensearchConfigurationParams(List trustedCertificates, + java.util.Map transientConfiguration) implements ConfigurationBuildParams { +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchUsableSpace.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchUsableSpace.java new file mode 100644 index 000000000000..ca489cb7d03a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchUsableSpace.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.configuration; + +import java.nio.file.Path; + +public record OpensearchUsableSpace(Path dataDir, long usableSpace) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchUsableSpaceProvider.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchUsableSpaceProvider.java new file mode 100644 index 000000000000..e584654c39e7 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/OpensearchUsableSpaceProvider.java @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.configuration; + +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; +import org.graylog.datanode.configuration.DatanodeConfiguration; + +import java.io.IOException; +import java.nio.file.FileStore; +import java.nio.file.Files; +import java.nio.file.Path; + +@Singleton +public class OpensearchUsableSpaceProvider implements Provider { + + private final Path dataTargetDir; + + @Inject + public OpensearchUsableSpaceProvider(DatanodeConfiguration datanodeConfiguration) { + dataTargetDir = datanodeConfiguration.datanodeDirectories().getDataTargetDir(); + } + + @Override + public OpensearchUsableSpace get() { + return new OpensearchUsableSpace(dataTargetDir, getUsableSpace(dataTargetDir)); + } + + private static long getUsableSpace(Path opensearchDataLocation) { + final FileStore fileStore; + try { + fileStore = Files.getFileStore(opensearchDataLocation); + return fileStore.getUsableSpace(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchClusterConfigurationBean.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchClusterConfigurationBean.java new file mode 100644 index 000000000000..1e8de037a67a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchClusterConfigurationBean.java @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.configuration.beans.impl; + +import com.google.common.collect.ImmutableMap; +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.graylog.datanode.process.configuration.files.TextConfigFile; +import org.graylog2.cluster.Node; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.NodeService; + +import java.nio.file.Path; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class OpensearchClusterConfigurationBean implements DatanodeConfigurationBean { + + public static final Path UNICAST_HOSTS_FILE = Path.of("unicast_hosts.txt"); + + private final Configuration localConfiguration; + private final NodeService nodeService; + + @Inject + public OpensearchClusterConfigurationBean(Configuration localConfiguration, NodeService nodeService) { + this.localConfiguration = localConfiguration; + this.nodeService = nodeService; + } + + @Override + public DatanodeConfigurationPart buildConfigurationPart(OpensearchConfigurationParams trustedCertificates) { + ImmutableMap.Builder properties = ImmutableMap.builder(); + + properties.put("network.bind_host", localConfiguration.getBindAddress()); + properties.put("network.publish_host", localConfiguration.getHostname()); + + if (localConfiguration.getClustername() != null && !localConfiguration.getClustername().isBlank()) { + properties.put("cluster.name", localConfiguration.getClustername()); + } + + if (localConfiguration.getBindAddress() != null && !localConfiguration.getBindAddress().isBlank()) { + properties.put("network.host", localConfiguration.getBindAddress()); + } + properties.put("http.port", String.valueOf(localConfiguration.getOpensearchHttpPort())); + properties.put("transport.port", String.valueOf(localConfiguration.getOpensearchTransportPort())); + + properties.put("node.name", localConfiguration.getDatanodeNodeName()); + + if (localConfiguration.getInitialClusterManagerNodes() != null && !localConfiguration.getInitialClusterManagerNodes().isBlank()) { + properties.put("cluster.initial_cluster_manager_nodes", localConfiguration.getInitialClusterManagerNodes()); + } else { + final var nodeList = String.join(",", nodeService.allActive().values().stream().map(Node::getHostname).collect(Collectors.toSet())); + properties.put("cluster.initial_cluster_manager_nodes", nodeList); + } + + final List discoverySeedHosts = localConfiguration.getOpensearchDiscoverySeedHosts(); + if (discoverySeedHosts != null && !discoverySeedHosts.isEmpty()) { + properties.put("discovery.seed_hosts", String.join(",", discoverySeedHosts)); + } + + properties.put("discovery.seed_providers", "file"); + + // TODO: why do we have this configured? 
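+ // Presumably: node.max_local_storage_nodes limits how many nodes may share a single data path (the default is 1), + // so a value of 3 tolerates overlapping datanode processes on the same directory, e.g. during restarts.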
+ properties.put("node.max_local_storage_nodes", "3"); + + return DatanodeConfigurationPart.builder() + .properties(properties.build()) + .withConfigFile(seedHostFile()) + .build(); + } + + private TextConfigFile seedHostFile() { + final String data = nodeService.allActive().values().stream() + .map(DataNodeDto::getClusterAddress) + .filter(Objects::nonNull) + .collect(Collectors.joining("\n")); + return new TextConfigFile(UNICAST_HOSTS_FILE, data); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchCommonConfigurationBean.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchCommonConfigurationBean.java new file mode 100644 index 000000000000..101649ea7512 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchCommonConfigurationBean.java @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.configuration.beans.impl; + +import com.google.common.collect.ImmutableMap; +import jakarta.inject.Inject; +import org.apache.commons.exec.OS; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; + +import java.util.List; +import java.util.Map; + +public class OpensearchCommonConfigurationBean implements DatanodeConfigurationBean { + + private final Configuration localConfiguration; + private final DatanodeConfiguration datanodeConfiguration; + + private static final List DEFAULT_NODE_ROLES = List.of("cluster_manager", "data", "ingest", "remote_cluster_client"); + + @Inject + public OpensearchCommonConfigurationBean(Configuration localConfiguration, DatanodeConfiguration datanodeConfiguration) { + this.localConfiguration = localConfiguration; + this.datanodeConfiguration = datanodeConfiguration; + } + + @Override + public DatanodeConfigurationPart buildConfigurationPart(OpensearchConfigurationParams buildParams) { + return DatanodeConfigurationPart.builder() + .properties(commonOpensearchConfig(buildParams)) + .nodeRoles(getNodeRoles()) + .javaOpt("-Xms%s".formatted(localConfiguration.getOpensearchHeap())) + .javaOpt("-Xmx%s".formatted(localConfiguration.getOpensearchHeap())) + .javaOpt("-Dopensearch.transport.cname_in_publish_address=true") + .build(); + } + + private List getNodeRoles() { + final List configuredRoles = localConfiguration.getNodeRoles(); + if(configuredRoles != null && !configuredRoles.isEmpty()) { + return configuredRoles; + } else { + return DEFAULT_NODE_ROLES; + } + } + + private Map commonOpensearchConfig(OpensearchConfigurationParams buildParams) { + final ImmutableMap.Builder config = ImmutableMap.builder(); + 
localConfiguration.getOpensearchNetworkHost().ifPresent( + networkHost -> config.put("network.host", networkHost)); + config.put("path.data", datanodeConfiguration.datanodeDirectories().getDataTargetDir().toString()); + config.put("path.logs", datanodeConfiguration.datanodeDirectories().getLogsTargetDir().toString()); + + if (localConfiguration.getOpensearchDebug() != null && !localConfiguration.getOpensearchDebug().isBlank()) { + config.put("logger.org.opensearch", localConfiguration.getOpensearchDebug()); + } + + // common OpenSearch config parameters from our docs + config.put("indices.query.bool.max_clause_count", localConfiguration.getIndicesQueryBoolMaxClauseCount().toString()); + + config.put("action.auto_create_index", "false"); + + // currently, startup fails on macOS without disabling this filter. + // for a description of the filter (although it's for ES), see https://www.elastic.co/guide/en/elasticsearch/reference/7.17/_system_call_filter_check.html + if (OS.isFamilyMac()) { + config.put("bootstrap.system_call_filter", "false"); + } + + config.putAll(buildParams.transientConfiguration()); + + return config.build(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchDefaultConfigFilesBean.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchDefaultConfigFilesBean.java new file mode 100644 index 000000000000..52d3025c1d84 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchDefaultConfigFilesBean.java @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.configuration.beans.impl; + +import jakarta.annotation.Nonnull; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.graylog.datanode.process.configuration.files.DatanodeConfigFile; +import org.graylog.datanode.process.configuration.files.InputStreamConfigFile; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + +public class OpensearchDefaultConfigFilesBean implements DatanodeConfigurationBean { + + @Override + public DatanodeConfigurationPart buildConfigurationPart(OpensearchConfigurationParams trustedCertificates) { + return DatanodeConfigurationPart.builder() + .configFiles(collectConfigFiles()) + .build(); + } + + private List collectConfigFiles() { + // this is a directory in main/resources that holds all the initial configuration files needed by the opensearch + // we manage this directory in git. Generally we assume that this is a read-only location and we need to copy + // its content to a read-write location for the managed opensearch process. + // This copy happens during each opensearch process start and will override any files that already exist + // from previous runs. + final Path sourceOfInitialConfiguration = Path.of("opensearch", "config"); + try { + return synchronizeConfig(sourceOfInitialConfiguration); + } catch (URISyntaxException | IOException e) { + throw new RuntimeException(e); + } + } + + public List synchronizeConfig(Path configRelativePath) throws URISyntaxException, IOException { + final URI uriToConfig = OpensearchDefaultConfigFilesBean.class.getResource("/" + configRelativePath.toString()).toURI(); + if ("jar".equals(uriToConfig.getScheme())) { + return copyFromJar(configRelativePath, uriToConfig); + } else { + return copyFromLocalFs(configRelativePath); + } + } + + private static List copyFromJar(Path configRelativePath, URI uri) throws IOException { + try ( + final FileSystem fs = FileSystems.newFileSystem(uri, Collections.emptyMap()); + ) { + // Get hold of the path to the top level directory of the JAR file + final Path resourcesRoot = fs.getPath("/"); + final Path source = resourcesRoot.resolve(configRelativePath.toString()); // caution, the toString is needed here to resolve properly! 
+ return collectRecursively(source); + } + } + + private static List copyFromLocalFs(Path configRelativePath) throws URISyntaxException, IOException { + final Path source = Paths.get(OpensearchDefaultConfigFilesBean.class.getResource("/" + configRelativePath).toURI()); + return collectRecursively(source); + } + + private static List collectRecursively(Path source) throws IOException { + List configFiles = new LinkedList<>(); + Files.walkFileTree(source, new SimpleFileVisitor<>() { + @Override + public FileVisitResult visitFile(Path sourceFile, BasicFileAttributes attrs) { + final Path relativePath = source.relativize(sourceFile); + // the relative path may come from a different provider than we'll use later, triggering ProviderMismatchException + // We need to disconnect it from the existing provider and use the default one. We can't use relative paths from zip/jar + // to create configuration files on a local file system. + final Path relativePathWithoutProvider = Path.of(relativePath.toString()); + try { + final ByteArrayInputStream stream = copyToMemory(sourceFile); + configFiles.add(new InputStreamConfigFile(relativePathWithoutProvider, stream)); + } catch (IOException e) { + throw new RuntimeException(e); + } + return FileVisitResult.CONTINUE; + } + }); + return configFiles; + } + + @Nonnull + private static ByteArrayInputStream copyToMemory(Path sourceFile) throws IOException { + try (final InputStream inputStream = Files.newInputStream(sourceFile)) { + ByteArrayOutputStream memory = new ByteArrayOutputStream(); + inputStream.transferTo(memory); + return new ByteArrayInputStream(memory.toByteArray()); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchSecurityConfigurationBean.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchSecurityConfigurationBean.java new file mode 100644 index 000000000000..d57070645883 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/OpensearchSecurityConfigurationBean.java @@ -0,0 +1,251 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.configuration.beans.impl; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.datanode.configuration.variants.OpensearchCertificates; +import org.graylog.datanode.configuration.variants.OpensearchCertificatesProvider; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.graylog.datanode.process.configuration.files.KeystoreConfigFile; +import org.graylog.datanode.process.configuration.files.OpensearchSecurityConfigurationFile; +import org.graylog.security.certutil.csr.InMemoryKeystoreInformation; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.graylog2.security.JwtSecret; +import org.graylog2.security.TruststoreCreator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.X509Certificate; +import java.util.Enumeration; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +public class OpensearchSecurityConfigurationBean implements DatanodeConfigurationBean { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchSecurityConfigurationBean.class); + + private static final String KEYSTORE_FORMAT = "PKCS12"; + private static final String TRUSTSTORE_FORMAT = "PKCS12"; + + /** + * This filename is used only internally - we copy user-provided certificates to this location and + * we configure opensearch to read this file. It doesn't have to match naming provided by user. + * The target configuration is regenerated during each startup, so it could also be a random filename + * as long as we use the same name as a copy-target and opensearch config property. + */ + private static final String TARGET_DATANODE_HTTP_KEYSTORE_FILENAME = "http-keystore.p12"; + /** + * This filename is used only internally - we copy user-provided certificates to this location and + * we configure opensearch to read this file. It doesn't have to match naming provided by user. + * The target configuration is regenerated during each startup, so it could also be a random filename + * as long as we use the same name as a copy-target and opensearch config property. 
+ */ + private static final String TARGET_DATANODE_TRANSPORT_KEYSTORE_FILENAME = "transport-keystore.p12"; + + private static final Path TRUSTSTORE_FILE = Path.of("datanode-truststore.p12"); + + private final Set opensearchCertificatesProviders; + private final Configuration localConfiguration; + private final JwtSecret jwtSecret; + + @Inject + public OpensearchSecurityConfigurationBean(Set opensearchCertificatesProviders, + final Configuration localConfiguration, + final JwtSecret jwtSecret) { + this.opensearchCertificatesProviders = opensearchCertificatesProviders; + this.localConfiguration = localConfiguration; + this.jwtSecret = jwtSecret; + } + + @Override + public DatanodeConfigurationPart buildConfigurationPart(OpensearchConfigurationParams opensearchConfigurationParams) { + + final DatanodeConfigurationPart.Builder configurationBuilder = DatanodeConfigurationPart.builder(); + + Optional opensearchCertificates = opensearchCertificatesProviders.stream() + .filter(s -> s.isConfigured(localConfiguration)) + .findFirst() + .map(OpensearchCertificatesProvider::build); + + configurationBuilder.securityConfigured(opensearchCertificates.isPresent()); // Caution, this may include insecure_startup config with no certs! + + final String truststorePassword = RandomStringUtils.randomAlphabetic(256); + + final TruststoreCreator truststoreCreator = TruststoreCreator.newDefaultJvm() + .addCertificates(opensearchConfigurationParams.trustedCertificates()); + + final Optional httpCert = opensearchCertificates + .map(OpensearchCertificates::getHttpCertificate); + + final Optional transportCert = opensearchCertificates + .map(OpensearchCertificates::getTransportCertificate); + + httpCert.ifPresent(cert -> { + try { + configurationBuilder.httpCertificate(cert); + configurationBuilder.withConfigFile(new KeystoreConfigFile(Path.of(TARGET_DATANODE_HTTP_KEYSTORE_FILENAME), cert)); + truststoreCreator.addCertificates(cert); + logCertificateInformation("HTTP certificate", cert); + } catch (GeneralSecurityException | IOException e) { + throw new OpensearchConfigurationException(e); + } + }); + + transportCert.ifPresent(cert -> { + try { + configurationBuilder.transportCertificate(cert); + configurationBuilder.withConfigFile(new KeystoreConfigFile(Path.of(TARGET_DATANODE_TRANSPORT_KEYSTORE_FILENAME), cert)); + truststoreCreator.addCertificates(cert); + logCertificateInformation("Transport certificate", cert); + } catch (GeneralSecurityException | IOException e) { + throw new OpensearchConfigurationException(e); + } + }); + + return configurationBuilder + .properties(properties(opensearchCertificates)) + .keystoreItems(keystoreItems(truststorePassword, httpCert, transportCert)) + .javaOpts(javaOptions(truststorePassword)) + .trustStore(truststoreCreator.getTruststore()) + .withConfigFile(truststoreFile(truststoreCreator, truststorePassword)) + .withConfigFile(new OpensearchSecurityConfigurationFile(jwtSecret)) + .build(); + } + + @Nonnull + private KeystoreConfigFile truststoreFile(TruststoreCreator truststoreCreator, String truststorePassword) { + final InMemoryKeystoreInformation keystore = new InMemoryKeystoreInformation(truststoreCreator.getTruststore(), truststorePassword.toCharArray()); + return new KeystoreConfigFile(TRUSTSTORE_FILE, keystore); + } + + + private Map properties(Optional opensearchCertificates) { + final ImmutableMap.Builder config = ImmutableMap.builder(); + + if (localConfiguration.getOpensearchAuditLog() != null && !localConfiguration.getOpensearchAuditLog().isBlank()) { + 
config.put("plugins.security.audit.type", localConfiguration.getOpensearchAuditLog()); + } + + // enable admin access via the REST API + config.put("plugins.security.restapi.admin.enabled", "true"); + + + if (opensearchCertificates.map(OpensearchCertificates::hasBothCertificates).orElse(false)) { + config.putAll(commonSecurityConfig()); + + config.put("plugins.security.ssl.transport.keystore_type", KEYSTORE_FORMAT); + config.put("plugins.security.ssl.transport.keystore_filepath", TARGET_DATANODE_TRANSPORT_KEYSTORE_FILENAME); + + opensearchCertificates.map(OpensearchCertificates::getTransportKeyAlias) + .ifPresent(alias -> config.put("plugins.security.ssl.transport.keystore_alias", alias)); + + config.put("plugins.security.ssl.transport.truststore_type", TRUSTSTORE_FORMAT); + config.put("plugins.security.ssl.transport.truststore_filepath", TRUSTSTORE_FILE.toString()); + + config.put("plugins.security.ssl.http.enabled", "true"); + + config.put("plugins.security.ssl.http.keystore_type", KEYSTORE_FORMAT); + config.put("plugins.security.ssl.http.keystore_filepath", TARGET_DATANODE_HTTP_KEYSTORE_FILENAME); + + opensearchCertificates.map(OpensearchCertificates::getHttpKeyAlias) + .ifPresent(alias -> config.put("plugins.security.ssl.http.keystore_alias", alias)); + + config.put("plugins.security.ssl.http.truststore_type", TRUSTSTORE_FORMAT); + config.put("plugins.security.ssl.http.truststore_filepath", TRUSTSTORE_FILE.toString()); + + // enable client cert auth + config.put("plugins.security.ssl.http.clientauth_mode", "OPTIONAL"); + } else { + config.put("plugins.security.disabled", "true"); + config.put("plugins.security.ssl.http.enabled", "false"); + } + return config.build(); + } + + private List javaOptions(String truststorePassword) { + final ImmutableList.Builder builder = ImmutableList.builder(); + builder.add("-Djavax.net.ssl.trustStore=" + TRUSTSTORE_FILE); + builder.add("-Djavax.net.ssl.trustStorePassword=" + truststorePassword); + builder.add("-Djavax.net.ssl.trustStoreType=pkcs12"); + return builder.build(); + } + + private Map commonSecurityConfig() { + final ImmutableMap.Builder config = ImmutableMap.builder(); + config.put("plugins.security.disabled", "false"); + + config.put("plugins.security.nodes_dn", "CN=*"); + config.put("plugins.security.allow_default_init_securityindex", "true"); + //config.put("plugins.security.authcz.admin_dn", "CN=kirk,OU=client,O=client,L=test,C=de"); + + config.put("plugins.security.enable_snapshot_restore_privilege", "true"); + config.put("plugins.security.check_snapshot_restore_write_privileges", "true"); + config.put("plugins.security.restapi.roles_enabled", "all_access,security_rest_api_access,readall"); + config.put("plugins.security.system_indices.enabled", "true"); + config.put("plugins.security.system_indices.indices", ".plugins-ml-model,.plugins-ml-task,.opendistro-alerting-config,.opendistro-alerting-alert*,.opendistro-anomaly-results*,.opendistro-anomaly-detector*,.opendistro-anomaly-checkpoints,.opendistro-anomaly-detection-state,.opendistro-reports-*,.opensearch-notifications-*,.opensearch-notebooks,.opensearch-observability,.opendistro-asynchronous-search-response*,.replication-metadata-store"); + + return config.build(); + } + + private Map keystoreItems(String truststorePassword, Optional httpCert, Optional transportCert) { + final ImmutableMap.Builder config = ImmutableMap.builder(); + config.put("plugins.security.ssl.transport.truststore_password_secure", new String(truststorePassword)); + 
config.put("plugins.security.ssl.http.truststore_password_secure", new String(truststorePassword)); + httpCert.ifPresent(c -> config.put("plugins.security.ssl.http.keystore_password_secure", new String(c.password()))); + transportCert.ifPresent(c -> config.put("plugins.security.ssl.transport.keystore_password_secure", new String(c.password()))); + return config.build(); + } + + + private void logCertificateInformation(String certificateType, KeystoreInformation keystore) { + try { + final KeyStore instance = keystore.loadKeystore(); + final Enumeration aliases = instance.aliases(); + while (aliases.hasMoreElements()) { + final Certificate cert = instance.getCertificate(aliases.nextElement()); + if (cert instanceof X509Certificate x509Certificate) { + final String alternativeNames = x509Certificate.getSubjectAlternativeNames() + .stream() + .map(san -> san.get(1)) + .map(Object::toString) + .collect(Collectors.joining(", ")); + LOG.info("Opensearch {} has following alternative names: {}", certificateType, alternativeNames); + LOG.info("Opensearch {} has following serial number: {}", certificateType, ((X509Certificate) cert).getSerialNumber()); + LOG.info("Opensearch {} has following validity: {} - {}", certificateType, ((X509Certificate) cert).getNotBefore(), ((X509Certificate) cert).getNotAfter()); + } + } + } catch (Exception e) { + throw new OpensearchConfigurationException("Failed to load kestore", e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/SearchableSnapshotsConfigurationBean.java b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/SearchableSnapshotsConfigurationBean.java new file mode 100644 index 000000000000..ff03ff1d31d8 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/configuration/beans/impl/SearchableSnapshotsConfigurationBean.java @@ -0,0 +1,185 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.configuration.beans.impl; + +import com.google.common.collect.ImmutableMap; +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import org.apache.commons.io.FileUtils; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.datanode.configuration.S3RepositoryConfiguration; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.opensearch.configuration.OpensearchUsableSpace; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationBean; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.math.BigDecimal; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * This opensearch configuration bean manages searchable snapshots and their S3 or local filesystem configuration. + * It configures the search role for the node if snapshots are enabled and also validates the node search cache size. + * If there is neither S3 nor local filesystem snapshot configuration, both search role and cache are disabled, + * preventing unnecessary disk space consumption on the node. + * + * The search role and cache configuration will be skipped if an explicit list of {@code node_roles} is provided in the + * configuration, and it doesn't contain the {@code search} role. + */ +public class SearchableSnapshotsConfigurationBean implements DatanodeConfigurationBean { + + private static final Logger LOG = LoggerFactory.getLogger(SearchableSnapshotsConfigurationBean.class); + + static final String SEARCH_NODE_ROLE = "search"; + private final Configuration localConfiguration; + private final S3RepositoryConfiguration s3RepositoryConfiguration; + private final Provider usableSpaceProvider; + + @Inject + public SearchableSnapshotsConfigurationBean(Configuration localConfiguration, S3RepositoryConfiguration s3RepositoryConfiguration, Provider usableSpaceProvider) { + this.localConfiguration = localConfiguration; + this.s3RepositoryConfiguration = s3RepositoryConfiguration; + this.usableSpaceProvider = usableSpaceProvider; + } + + @Override + public DatanodeConfigurationPart buildConfigurationPart(OpensearchConfigurationParams trustedCertificates) { + if (snapshotsAreEnabled()) { + final DatanodeConfigurationPart.Builder builder = DatanodeConfigurationPart.builder(); + + final boolean searchRoleEnabled = searchRoleEnabled(); + if(searchRoleEnabled) { + validateUsableSpace(); + builder.addNodeRole(SEARCH_NODE_ROLE); + } + return builder + .properties(properties(searchRoleEnabled)) + .keystoreItems(keystoreItems()) + .build(); + } else if (searchRoleExplicitlyConfigured() && !snapshotsAreEnabled()) { + throw new OpensearchConfigurationException("Your configuration contains the search node role in node_roles but there is no " + + "snapshots repository configured.
Please remove the role or provide path_repo or S3 repository credentials."); + } else { + LOG.info("Opensearch snapshots not configured, skipping search role and cache configuration."); + return DatanodeConfigurationPart.builder().build(); + } + } + + private boolean searchRoleExplicitlyConfigured() { + return localConfiguration.getNodeRoles() != null && localConfiguration.getNodeRoles().contains(SEARCH_NODE_ROLE); + } + + private boolean searchRoleEnabled() { + final boolean rolesNotConfigured = localConfiguration.getNodeRoles() == null || localConfiguration.getNodeRoles().isEmpty(); + return rolesNotConfigured || localConfiguration.getNodeRoles().contains(SEARCH_NODE_ROLE); + } + + private void validateUsableSpace() throws OpensearchConfigurationException { + final OpensearchUsableSpace usableSpace = usableSpaceProvider.get(); + final String configuredCacheSize = this.localConfiguration.getNodeSearchCacheSize(); + final long cacheSize = toBytes(configuredCacheSize); + final String usableHumanReadable = toHumanReadableSize(usableSpace.usableSpace()); + if (cacheSize >= usableSpace.usableSpace()) { + throw new OpensearchConfigurationException(""" + There is not enough usable space for the node search cache. Your system has only %s available. + Either decrease node_search_cache_size configuration or make sure that datanode has enough free disk space. + Data directory: %s, current node_search_cache_size: %s""" + .formatted(usableHumanReadable, usableSpace.dataDir().toAbsolutePath(), configuredCacheSize)); + } else if (percentageUsage(usableSpace.usableSpace(), cacheSize) > 80.0) { + LOG.warn("Your system is running out of disk space. Current node_search_cache_size is configured to {} " + + "and your disk has only {} available.", configuredCacheSize, usableHumanReadable); + } + } + + private double percentageUsage(long usableSpace, long cacheSize) { + return 100.0 / usableSpace * cacheSize; + } + + @Nonnull + private static String toHumanReadableSize(long usableSpace) { + return FileUtils.byteCountToDisplaySize(usableSpace).replaceFirst("\\s", "").toLowerCase(Locale.ROOT); + } + + + public static long toBytes(String cacheSize) { + long returnValue = -1; + Pattern patt = Pattern.compile("([\\d.]+)([GMK]B)", Pattern.CASE_INSENSITIVE); + Matcher matcher = patt.matcher(cacheSize); + Map powerMap = new HashMap<>(); + powerMap.put("GB", 3); + powerMap.put("MB", 2); + powerMap.put("KB", 1); + if (matcher.find()) { + String number = matcher.group(1); + int pow = powerMap.get(matcher.group(2).toUpperCase(Locale.ROOT)); + BigDecimal bytes = new BigDecimal(number); + bytes = bytes.multiply(BigDecimal.valueOf(1024).pow(pow)); + returnValue = bytes.longValue(); + } + + if (returnValue == -1) { + throw new PreflightCheckException(String.format(Locale.ROOT, "Unexpected value %s of node_search_cache_size", cacheSize)); + } + + return returnValue; + } + + private Map properties(boolean searchRoleEnabled) { + final ImmutableMap.Builder builder = ImmutableMap.builder(); + + if(searchRoleEnabled) { // configure cache only if we also have the search role + builder.put("node.search.cache.size", localConfiguration.getNodeSearchCacheSize()); + } + + if (isSharedFileSystemRepo()) { + // https://opensearch.org/docs/latest/tuning-your-cluster/availability-and-recovery/snapshots/snapshot-restore/#shared-file-system + if (localConfiguration.getPathRepo() != null && !localConfiguration.getPathRepo().isEmpty()) { + builder.put("path.repo", String.join(",", localConfiguration.getPathRepo())); + } + } + + if 
(s3RepositoryConfiguration.isRepositoryEnabled()) { + builder.putAll(s3RepositoryConfiguration.toOpensearchProperties()); + } + return builder.build(); + } + + private Map keystoreItems() { + final ImmutableMap.Builder builder = ImmutableMap.builder(); + if (s3RepositoryConfiguration.isRepositoryEnabled()) { + builder.put("s3.client.default.access_key", s3RepositoryConfiguration.getS3ClientDefaultAccessKey()); + builder.put("s3.client.default.secret_key", s3RepositoryConfiguration.getS3ClientDefaultSecretKey()); + } + return builder.build(); + } + + private boolean snapshotsAreEnabled() { + return s3RepositoryConfiguration.isRepositoryEnabled() || isSharedFileSystemRepo(); + } + + private boolean isSharedFileSystemRepo() { + return localConfiguration.getPathRepo() != null && !localConfiguration.getPathRepo().isEmpty(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/rest/OpensearchRestClient.java b/data-node/src/main/java/org/graylog/datanode/opensearch/rest/OpensearchRestClient.java new file mode 100644 index 000000000000..351d00ef6bae --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/rest/OpensearchRestClient.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.rest; + +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.opensearch.configuration.OpensearchConfiguration; +import org.graylog.shaded.opensearch2.org.apache.http.HttpHost; +import org.graylog.shaded.opensearch2.org.apache.http.HttpRequestInterceptor; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; + +public class OpensearchRestClient { + private static final Logger LOG = LoggerFactory.getLogger(OpensearchRestClient.class); + + public static RestHighLevelClient build(final OpensearchConfiguration configuration, final DatanodeConfiguration datanodeConfiguration, final TrustManager tm) { + final HttpHost host = configuration.getRestBaseUrl(); + + org.graylog.shaded.opensearch2.org.opensearch.client.RestClientBuilder builder = org.graylog.shaded.opensearch2.org.opensearch.client.RestClient.builder(host); + if ("https".equals(host.getSchemeName())) { + + try { + final var sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, new TrustManager[]{tm}, new SecureRandom()); + + builder.setHttpClientConfigCallback(httpClientBuilder -> { + httpClientBuilder.addInterceptorLast((HttpRequestInterceptor) (request, context) -> { + final String jwtToken = datanodeConfiguration.indexerJwtAuthTokenProvider().get(); + request.addHeader("Authorization", jwtToken); + }); + httpClientBuilder.setSSLContext(sslContext); + return httpClientBuilder; + }); + } catch (NoSuchAlgorithmException | KeyManagementException ex) { + LOG.error("Could not initialize SSL correctly: {}", ex.getMessage(), ex); + } + } + return new RestHighLevelClient(builder); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/FailuresCounter.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/FailuresCounter.java new file mode 100644 index 000000000000..89d85e032e94 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/FailuresCounter.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.statemachine; + +public class FailuresCounter { + + private int counter; + private final int initialValue; + private final int maxFailuresCount; + + private FailuresCounter(int initialValue, int maxFailuresCount) { + this.maxFailuresCount = maxFailuresCount; + this.initialValue = initialValue; + resetFailuresCounter(); + } + + public static FailuresCounter oneBased(int maxFailuresCount) { + return new FailuresCounter(1, maxFailuresCount); + } + + public static FailuresCounter zeroBased(int maxFailuresCount) { + return new FailuresCounter(0, maxFailuresCount); + } + + public synchronized void increment() { + this.counter++; + } + + public synchronized boolean failedTooManyTimes() { + return this.counter >= maxFailuresCount; + } + + public synchronized void resetFailuresCounter() { + this.counter = initialValue; + } + + public int failuresCount() { + return counter; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchEvent.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchEvent.java new file mode 100644 index 000000000000..4b44c079f5ee --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchEvent.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine; + +public enum OpensearchEvent { + PROCESS_PREPARED, + PROCESS_STARTED, + HEALTH_CHECK_OK, + HEALTH_CHECK_FAILED, + PROCESS_STOPPED, + PROCESS_REMOVE, + RESET, // user-triggered action + PROCESS_TERMINATED // failure from outside, not requested +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchState.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchState.java new file mode 100644 index 000000000000..8b83327f238a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchState.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.statemachine; + +import org.graylog2.cluster.nodes.DataNodeStatus; + +public enum OpensearchState { + /** + * Fresh created process, not started yet + */ + WAITING_FOR_CONFIGURATION(DataNodeStatus.UNCONFIGURED), + /** + * All configuration files have been written + */ + PREPARED(DataNodeStatus.PREPARED), + /** + * The process is running on the underlying OS and has a process ID. It's not responding to the REST API yet + */ + STARTING(DataNodeStatus.STARTING), + /** + * Opensearch is now available on the default port with GREEN cluster status, all good + */ + AVAILABLE(DataNodeStatus.AVAILABLE), + /** + * There are problems in the REST communication with the opensearch. We'll retry several times before + * we go to the FAILED state, giving up. + */ + NOT_RESPONDING(DataNodeStatus.UNAVAILABLE), + + /** + * Failed to reach the opensearch REST API, but the underlying process is still alive + */ + FAILED(DataNodeStatus.UNAVAILABLE), + + /** + * Removal of node from Opensearch cluster requested + */ + REMOVING(DataNodeStatus.REMOVING), + /** + * Removal of node from Opensearch cluster completed + */ + REMOVED(DataNodeStatus.REMOVED), + + /** + * The OS process is not running anymore on the underlying system, it has been terminated + */ + TERMINATED(DataNodeStatus.UNAVAILABLE); + + + private final DataNodeStatus dataNodeStatus; + + OpensearchState(DataNodeStatus dataNodeStatus) { + this.dataNodeStatus = dataNodeStatus; + } + + public DataNodeStatus getDataNodeStatus() { + return dataNodeStatus; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachine.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachine.java new file mode 100644 index 000000000000..842928349de4 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachine.java @@ -0,0 +1,158 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.statemachine; + +import com.github.oxo42.stateless4j.StateMachine; +import com.github.oxo42.stateless4j.StateMachineConfig; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracer; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracerAggregator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Set; + +public class OpensearchStateMachine extends StateMachine { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchStateMachine.class); + + /** + * How many times can the OS rest api call fail before we switch to the failed state + */ + public static final int MAX_REST_TEMPORARY_FAILURES = 3; + public static final int MAX_REST_STARTUP_FAILURES = 5; + public static final int MAX_REBOOT_FAILURES = 3; + + StateMachineTracerAggregator tracerAggregator = new StateMachineTracerAggregator(); + + public OpensearchStateMachine(OpensearchState initialState, StateMachineConfig config) { + super(initialState, config); + setTrace(tracerAggregator); + } + + public static OpensearchStateMachine createNew(OpensearchProcess process, Set tracer) { + final FailuresCounter restFailureCounter = FailuresCounter.oneBased(MAX_REST_TEMPORARY_FAILURES); + final FailuresCounter startupFailuresCounter = FailuresCounter.oneBased(MAX_REST_STARTUP_FAILURES); + final FailuresCounter rebootCounter = FailuresCounter.oneBased(MAX_REBOOT_FAILURES); + + StateMachineConfig config = new StateMachineConfig<>(); + + // Freshly created process, it hasn't started yet and doesn't have any pid. + config.configure(OpensearchState.WAITING_FOR_CONFIGURATION) + .permit(OpensearchEvent.PROCESS_PREPARED, OpensearchState.PREPARED) + // jump to started only allowed to facilitate startup with insecure config + .permit(OpensearchEvent.PROCESS_STARTED, OpensearchState.STARTING) + .ignore(OpensearchEvent.PROCESS_STOPPED) + .ignore(OpensearchEvent.HEALTH_CHECK_FAILED); + + config.configure(OpensearchState.PREPARED) + .permit(OpensearchEvent.PROCESS_STARTED, OpensearchState.STARTING) + .permit(OpensearchEvent.PROCESS_TERMINATED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_STOPPED, OpensearchState.TERMINATED) + .ignore(OpensearchEvent.HEALTH_CHECK_FAILED); + + // the process has started already, now we have to wait for a running OS and available REST api + // the startupFailuresCounter keeps track of failed REST status calls and allows failures during the + // startup period + config.configure(OpensearchState.STARTING) + .onEntryFrom(OpensearchEvent.PROCESS_STARTED, process::start) // we don't want to re-trigger start from OpensearchEvent.HEALTH_CHECK_FAILED below + .permitDynamic(OpensearchEvent.HEALTH_CHECK_FAILED, + () -> startupFailuresCounter.failedTooManyTimes() ?
OpensearchState.FAILED : OpensearchState.STARTING, + startupFailuresCounter::increment) + .permit(OpensearchEvent.HEALTH_CHECK_OK, OpensearchState.AVAILABLE) + .permit(OpensearchEvent.PROCESS_STOPPED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_TERMINATED, OpensearchState.TERMINATED); + + // the process is running and responding to the REST status, it's available for any usage + config.configure(OpensearchState.AVAILABLE) + .onEntry(restFailureCounter::resetFailuresCounter) + .onEntry(rebootCounter::resetFailuresCounter) + .onEntry(process::available) + .permitReentry(OpensearchEvent.HEALTH_CHECK_OK) + .permit(OpensearchEvent.HEALTH_CHECK_FAILED, OpensearchState.NOT_RESPONDING) + .permit(OpensearchEvent.PROCESS_STOPPED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_TERMINATED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_REMOVE, OpensearchState.REMOVING) + .permit(OpensearchEvent.PROCESS_PREPARED, OpensearchState.PREPARED, process::stop) //restart if reconfigured + .permit(OpensearchEvent.PROCESS_STARTED, OpensearchState.STARTING); // allow restarts + + // if the REST api is not responding, we'll jump to this state and count how many times the failure + // occurs. If it fails too many times, we'll mark the process as FAILED + config.configure(OpensearchState.NOT_RESPONDING) + .permitDynamic(OpensearchEvent.HEALTH_CHECK_FAILED, + () -> restFailureCounter.failedTooManyTimes() ? OpensearchState.FAILED : OpensearchState.NOT_RESPONDING, + restFailureCounter::increment + ) + .permit(OpensearchEvent.HEALTH_CHECK_OK, OpensearchState.AVAILABLE) + .permit(OpensearchEvent.PROCESS_STOPPED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_TERMINATED, OpensearchState.TERMINATED); + + // failed and we see the process as not recoverable. + // TODO: what to do if the process fails? Reboot?
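+ // Note: as configured below, FAILED is not a dead end: a later successful health check returns the process to AVAILABLE, + // and PROCESS_PREPARED or PROCESS_STARTED still permit a reconfiguration or restart attempt.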
+ config.configure(OpensearchState.FAILED) + .ignore(OpensearchEvent.HEALTH_CHECK_FAILED) + .permit(OpensearchEvent.HEALTH_CHECK_OK, OpensearchState.AVAILABLE) + .permit(OpensearchEvent.PROCESS_STOPPED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_PREPARED, OpensearchState.PREPARED) //restart if reconfigured + .permit(OpensearchEvent.PROCESS_TERMINATED, OpensearchState.TERMINATED) + .permit(OpensearchEvent.PROCESS_STARTED, OpensearchState.STARTING); + + // final state, the process is not alive anymore, terminated on the operating system level + config.configure(OpensearchState.TERMINATED) + .onEntry(process::stop) + .permit(OpensearchEvent.PROCESS_STARTED, OpensearchState.STARTING, rebootCounter::increment) + .ignore(OpensearchEvent.HEALTH_CHECK_FAILED) + .ignore(OpensearchEvent.PROCESS_STOPPED) + .ignore(OpensearchEvent.PROCESS_TERMINATED); // final state, all following terminate events are ignored + + config.configure(OpensearchState.REMOVING) + .onEntry(process::remove) + .ignore(OpensearchEvent.HEALTH_CHECK_OK) + .permit(OpensearchEvent.HEALTH_CHECK_FAILED, OpensearchState.FAILED) + .permit(OpensearchEvent.PROCESS_STOPPED, OpensearchState.REMOVED); + + config.configure(OpensearchState.REMOVED) + .onEntry(process::stop) + .permit(OpensearchEvent.RESET, OpensearchState.WAITING_FOR_CONFIGURATION, process::reset) + .ignore(OpensearchEvent.PROCESS_STOPPED); + + OpensearchStateMachine stateMachine = new OpensearchStateMachine(OpensearchState.WAITING_FOR_CONFIGURATION, config); + tracer.forEach(t -> { + t.setStateMachine(stateMachine); + stateMachine.getTracerAggregator().addTracer(t); + }); + return stateMachine; + } + + public StateMachineTracerAggregator getTracerAggregator() { + return tracerAggregator; + } + + private void fire(OpensearchEvent trigger, OpensearchEvent errorEvent) { + try { + super.fire(trigger); + } catch (Exception e) { + LOG.error("Failed to fire event " + trigger, e); + super.fire(errorEvent); + } + } + + @Override + public void fire(OpensearchEvent trigger) { + fire(trigger, OpensearchEvent.HEALTH_CHECK_FAILED); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachineProvider.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachineProvider.java new file mode 100644 index 000000000000..774e95c08b1a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachineProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.statemachine; + +import jakarta.inject.Inject; +import jakarta.inject.Provider; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracer; + +import java.util.Set; + +public class OpensearchStateMachineProvider implements Provider { + private final OpensearchStateMachine opensearchStateMachine; + + @Inject + public OpensearchStateMachineProvider(Set tracer, OpensearchProcess process) { + this.opensearchStateMachine = OpensearchStateMachine.createNew(process, tracer); + } + + @Override + public OpensearchStateMachine get() { + return opensearchStateMachine; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/ClusterNodeStateTracer.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/ClusterNodeStateTracer.java new file mode 100644 index 000000000000..b61d2a2b78ac --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/ClusterNodeStateTracer.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine.tracer; + +import jakarta.inject.Inject; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog2.cluster.NodeNotFoundException; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.NodeService; +import org.graylog2.datanode.DataNodeLifecycleTrigger; +import org.graylog2.plugin.system.NodeId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ClusterNodeStateTracer implements StateMachineTracer { + + private final Logger log = LoggerFactory.getLogger(ClusterNodeStateTracer.class); + + private final NodeService nodeService; + private final NodeId nodeId; + + @Inject + public ClusterNodeStateTracer(NodeService nodeService, NodeId nodeId) { + this.nodeService = nodeService; + this.nodeId = nodeId; + } + + @Override + public void trigger(OpensearchEvent processEvent) { + } + + @Override + public void transition(OpensearchEvent processEvent, OpensearchState source, OpensearchState destination) { + try { + if (!source.equals(destination)) { + log.info("Updating cluster node {} from {} to {} (reason: {})", nodeId.getNodeId(), + source.getDataNodeStatus(), destination.getDataNodeStatus(), processEvent.name()); + DataNodeDto node = nodeService.byNodeId(nodeId); + nodeService.update(node.toBuilder() + .setDataNodeStatus(destination.getDataNodeStatus()) + .setActionQueue(DataNodeLifecycleTrigger.CLEAR) + .build()); + } + } catch (NodeNotFoundException e) { + throw new RuntimeException("Node not registered, this should not happen."); + } + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/OpensearchWatchdog.java 
b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/OpensearchWatchdog.java new file mode 100644 index 000000000000..d10a0631b150 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/OpensearchWatchdog.java @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine.tracer; + +import jakarta.inject.Inject; +import org.graylog.datanode.opensearch.statemachine.FailuresCounter; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This process watchdog follows transitions of the state machine and will try to restart the process in case of termination. + * If the process is actually stopped, it won't restart it and will automatically deactivate itself. + */ +public class OpensearchWatchdog implements StateMachineTracer { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchWatchdog.class); + + private boolean active; + private final FailuresCounter restartCounter; + private OpensearchStateMachine stateMachine; + + @Inject + public OpensearchWatchdog() { + this(3); + } + + public OpensearchWatchdog(int restartAttemptsCount) { + this.restartCounter = FailuresCounter.zeroBased(restartAttemptsCount); + } + + @Override + public void trigger(OpensearchEvent trigger) { + LOG.debug("Watchdog trigger: {}", trigger); + } + + @Override + public void transition(OpensearchEvent trigger, OpensearchState source, OpensearchState destination) { + LOG.debug("Watchdog transition event:{}, source:{}, destination:{}", trigger, source, destination); + switch (trigger) { + case PROCESS_STARTED -> activateWatchdog(); + case PROCESS_TERMINATED -> restartProcess(); + case HEALTH_CHECK_OK -> resetCounter(); + case PROCESS_STOPPED -> deactivateWatchdog(); + } + } + + private void resetCounter() { + this.restartCounter.resetFailuresCounter(); + } + + private void activateWatchdog() { + this.active = true; + } + + private void deactivateWatchdog() { + this.active = false; + } + + private void restartProcess() { + if (this.active) { + if (!restartCounter.failedTooManyTimes()) { + try { + LOG.info("Detected terminated process, restarting. 
Attempt #{}", restartCounter.failuresCount() + 1); + this.stateMachine.fire(OpensearchEvent.PROCESS_STARTED); + } catch (Exception e) { + LOG.warn("Failed to restart process", e); + } finally { + restartCounter.increment(); + } + } else { + // give up trying, stop the watchdog + LOG.warn("Process watchdog terminated after too many restart attempts"); + active = false; + } + } + } + + public boolean isActive() { + return active; + } + + @Override + public void setStateMachine(OpensearchStateMachine stateMachine) { + this.stateMachine = stateMachine; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTracer.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTracer.java new file mode 100644 index 000000000000..987f17291ec8 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTracer.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine.tracer; + +import com.github.oxo42.stateless4j.delegates.Trace; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; + +/** + * The tracer allows to observe triggered event (before) and transitions (after) of the {@link OpensearchStateMachine} + */ +public interface StateMachineTracer extends Trace { + + default void setStateMachine(OpensearchStateMachine stateMachine) { + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTracerAggregator.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTracerAggregator.java new file mode 100644 index 000000000000..f50a0e6210f0 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTracerAggregator.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.statemachine.tracer; + +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; + +import java.util.LinkedList; +import java.util.List; + +public class StateMachineTracerAggregator implements StateMachineTracer { + + private final List delegates = new LinkedList<>(); + + public void addTracer(StateMachineTracer tracer) { + delegates.add(tracer); + } + + public void removeTracer(StateMachineTracer tracer) { + delegates.remove(tracer); + } + + @Override + public void trigger(OpensearchEvent processEvent) { + delegates.forEach(d -> d.trigger(processEvent)); + } + + @Override + public void transition(OpensearchEvent processEvent, OpensearchState s1, OpensearchState s2) { + delegates.forEach(d -> d.transition(processEvent, s1, s2)); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTransitionLogger.java b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTransitionLogger.java new file mode 100644 index 000000000000..42969d3b1db8 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/opensearch/statemachine/tracer/StateMachineTransitionLogger.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine.tracer; + +import jakarta.inject.Inject; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class StateMachineTransitionLogger implements StateMachineTracer { + + private static final Logger LOG = LoggerFactory.getLogger(StateMachineTransitionLogger.class); + + @Inject + public StateMachineTransitionLogger() { + } + + @Override + public void trigger(OpensearchEvent trigger) { + + } + + @Override + public void transition(OpensearchEvent trigger, OpensearchState source, OpensearchState destination) { + if (!source.equals(destination)) { + LOG.debug("Triggered {}, source state: {}, destination: {}", trigger, source, destination); + } + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/periodicals/ClusterStateResponse.java b/data-node/src/main/java/org/graylog/datanode/periodicals/ClusterStateResponse.java new file mode 100644 index 000000000000..a1c46a49078e --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/periodicals/ClusterStateResponse.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * .
+ */
+package org.graylog.datanode.periodicals;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.Map;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public record ClusterStateResponse(@JsonProperty("cluster_manager_node") String clusterManagerNode, @JsonProperty("nodes") Map<String, NodeState> nodes) {
+    @JsonIgnoreProperties(ignoreUnknown = true)
+    public record NodeState(@JsonProperty("name") String name, @JsonProperty("transport_address") String transportAddress) {
+    }
+}
diff --git a/data-node/src/main/java/org/graylog/datanode/periodicals/MetricsCollector.java b/data-node/src/main/java/org/graylog/datanode/periodicals/MetricsCollector.java
new file mode 100644
index 000000000000..c20964078399
--- /dev/null
+++ b/data-node/src/main/java/org/graylog/datanode/periodicals/MetricsCollector.java
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ */ +package org.graylog.datanode.periodicals; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.metrics.ClusterStatMetricsCollector; +import org.graylog.datanode.metrics.NodeMetricsCollector; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.shaded.opensearch2.org.joda.time.DateTime; +import org.graylog.shaded.opensearch2.org.joda.time.DateTimeZone; +import org.graylog.shaded.opensearch2.org.opensearch.action.index.IndexRequest; +import org.graylog.shaded.opensearch2.org.opensearch.action.index.IndexResponse; +import org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchRequest; +import org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchResponse; +import org.graylog.shaded.opensearch2.org.opensearch.client.RequestOptions; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog.shaded.opensearch2.org.opensearch.core.action.ActionListener; +import org.graylog.shaded.opensearch2.org.opensearch.index.query.QueryBuilders; +import org.graylog.shaded.opensearch2.org.opensearch.search.builder.SearchSourceBuilder; +import org.graylog.shaded.opensearch2.org.opensearch.search.sort.SortBuilders; +import org.graylog.shaded.opensearch2.org.opensearch.search.sort.SortOrder; +import org.graylog2.plugin.periodical.Periodical; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.lang.management.MemoryMXBean; +import java.lang.management.MemoryUsage; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +public class MetricsCollector extends Periodical { + + private static final Logger LOG = LoggerFactory.getLogger(MetricsCollector.class); + private final OpensearchProcess process; + private final Configuration configuration; + private NodeMetricsCollector nodeStatMetricsCollector; + private ClusterStatMetricsCollector clusterStatMetricsCollector; + private final ObjectMapper objectMapper; + + @Inject + public MetricsCollector(OpensearchProcess process, Configuration configuration, ObjectMapper objectMapper) { + this.process = process; + this.configuration = configuration; + this.objectMapper = objectMapper; + } + + @Override + public boolean runsForever() { + return false; + } + + @Override + public boolean stopOnGracefulShutdown() { + return false; + } + + @Override + public boolean startOnThisNode() { + return true; + } + + @Override + public boolean isDaemon() { + return true; + } + + @Override + public int getInitialDelaySeconds() { + return 0; + } + + @Override + public int getPeriodSeconds() { + return 60; + } + + @Nonnull + @Override + protected Logger getLogger() { + return LOG; + } + + @Override + public void doRun() { + if (process.isInState(OpensearchState.AVAILABLE)) { + process.restClient().ifPresent(client -> { + this.nodeStatMetricsCollector = new NodeMetricsCollector(client, objectMapper); + this.clusterStatMetricsCollector = new ClusterStatMetricsCollector(client, objectMapper); + final IndexRequest indexRequest = new IndexRequest(configuration.getMetricsStream()); + Map metrics = new HashMap(); + metrics.put(configuration.getMetricsTimestamp(), new 
DateTime(DateTimeZone.UTC)); + String node = configuration.getDatanodeNodeName(); + metrics.put("node", node); + addJvmMetrics(metrics); + metrics.putAll(nodeStatMetricsCollector.getNodeMetrics(node)); + indexRequest.source(metrics); + indexDocument(client, indexRequest); + + if (process.isManagerNode()) { + metrics = new HashMap<>(clusterStatMetricsCollector.getClusterMetrics(getPreviousMetricsForCluster(client))); + metrics.put(configuration.getMetricsTimestamp(), new DateTime(DateTimeZone.UTC)); + indexRequest.source(metrics); + indexDocument(client, indexRequest); + } + }); + } + } + + private void addJvmMetrics(Map metrics) { + MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean(); + metrics.put("dn_heap_usage", calcUsage(memoryMXBean.getHeapMemoryUsage())); + metrics.put("dn_non_heap_usage", calcUsage(memoryMXBean.getNonHeapMemoryUsage())); + + Runtime runtime = Runtime.getRuntime(); + metrics.put("dn_processors", runtime.availableProcessors()); + + metrics.put("dn_thread_count", Thread.activeCount()); + + long gcTime = ManagementFactory.getGarbageCollectorMXBeans().stream() + .mapToLong(GarbageCollectorMXBean::getCollectionTime) + .sum(); + metrics.put("dn_gc_time", gcTime); + } + + + public static Map> getDatanodeMetrics() { + return Map.of( + "dn_heap_usage", ImmutableMap.of("type", "float"), + "dn_non_heap_usage", ImmutableMap.of("type", "float"), + "dn_processors", ImmutableMap.of("type", "integer"), + "dn_thread_count", ImmutableMap.of("type", "integer"), + "dn_gc_time", ImmutableMap.of("type", "long") + ); + } + + private float calcUsage(MemoryUsage memoryUsage) { + return 100 * (float) memoryUsage.getUsed() / memoryUsage.getCommitted(); + } + + private Map getPreviousMetricsForCluster(RestHighLevelClient client) { + SearchRequest searchRequest = new SearchRequest(configuration.getMetricsStream()); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("node"))); // You can adjust the query based on your requirements + searchSourceBuilder.size(1); // Retrieve only one document + searchSourceBuilder.sort(SortBuilders.fieldSort(configuration.getMetricsTimestamp()).order(SortOrder.DESC)); // Sort by timestamp in descending order + searchRequest.source(searchSourceBuilder); + SearchResponse searchResponse = null; + try { + searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + LOG.error("Could not retrieve previous metrics", e); + } + + if (Objects.nonNull(searchResponse) && searchResponse.getHits().getTotalHits().value > 0) { + // Retrieve the first hit (latest document) from the search response + return searchResponse.getHits().getAt(0).getSourceAsMap(); + } else { + LOG.info("No previous metrics for cluster"); + } + return Map.of(); + } + + private static void indexDocument(RestHighLevelClient client, IndexRequest indexRequest) { + client.indexAsync(indexRequest, RequestOptions.DEFAULT, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + } + + @Override + public void onFailure(Exception e) { + LOG.error("Error indexing metrics", e); + } + }); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/periodicals/NodePingPeriodical.java b/data-node/src/main/java/org/graylog/datanode/periodicals/NodePingPeriodical.java new file mode 100644 index 000000000000..47d3edfd2a88 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/periodicals/NodePingPeriodical.java @@ 
-0,0 +1,167 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.periodicals; + +import jakarta.inject.Inject; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog2.cluster.nodes.DataNodeDto; +import org.graylog2.cluster.nodes.DataNodeStatus; +import org.graylog2.cluster.nodes.NodeService; +import org.graylog2.plugin.Version; +import org.graylog2.plugin.periodical.Periodical; +import org.graylog2.plugin.system.NodeId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nonnull; +import java.net.URI; +import java.util.Date; +import java.util.List; +import java.util.function.Supplier; + +public class NodePingPeriodical extends Periodical { + + private static final Logger LOG = LoggerFactory.getLogger(NodePingPeriodical.class); + private final NodeService nodeService; + private final NodeId nodeId; + private final Supplier opensearchBaseUri; + private final Supplier opensearchClusterUri; + private final Supplier datanodeRestApiUri; + private final Configuration configuration; + private final Supplier processState; + private final Supplier certValidUntil; + private final Supplier> opensearchRoles; + + private final Version version = Version.CURRENT_CLASSPATH; + + + @Inject + public NodePingPeriodical(NodeService nodeService, NodeId nodeId, Configuration configuration, OpensearchProcess managedOpenSearch, DatanodeKeystore datanodeKeystore) { + this( + nodeService, + nodeId, + configuration, + managedOpenSearch::getOpensearchBaseUrl, + managedOpenSearch::getOpensearchClusterUrl, + managedOpenSearch::getDatanodeRestApiUrl, + () -> managedOpenSearch.processInfo().state(), + datanodeKeystore::getCertificateExpiration, + managedOpenSearch::getOpensearchRoles + ); + } + + NodePingPeriodical( + NodeService nodeService, + NodeId nodeId, + Configuration configuration, + Supplier opensearchBaseUri, + Supplier opensearchClusterUri, + Supplier datanodeRestApiUri, + Supplier processState, + Supplier certValidUntil, + Supplier> opensearchRoles + ) { + this.nodeService = nodeService; + this.nodeId = nodeId; + this.opensearchBaseUri = opensearchBaseUri; + this.opensearchClusterUri = opensearchClusterUri; + this.datanodeRestApiUri = datanodeRestApiUri; + this.configuration = configuration; + this.processState = processState; + this.certValidUntil = certValidUntil; + this.opensearchRoles = opensearchRoles; + } + + @Override + public boolean runsForever() { + return false; + } + + @Override + public boolean stopOnGracefulShutdown() { + return false; + } + + @Override + public boolean startOnThisNode() { + return true; + } + + @Override + public boolean isDaemon() { + return true; + } + + @Override + public int getInitialDelaySeconds() { + return 0; + } + + @Override + public int getPeriodSeconds() { + return 1; + 
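+        // (every second; doRun() below re-publishes this node's addresses, status and certificate expiration)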
} + + @Nonnull + @Override + protected Logger getLogger() { + return LOG; + } + + @Override + public void initialize() { + registerServer(); + } + + @Override + public void doRun() { + final DataNodeDto dto = DataNodeDto.Builder.builder() + .setId(nodeId.getNodeId()) + .setTransportAddress(opensearchBaseUri.get().toString()) + .setClusterAddress(opensearchClusterUri.get()) + .setDataNodeStatus(processState.get().getDataNodeStatus()) + .setHostname(configuration.getHostname()) + .setRestApiAddress(datanodeRestApiUri.get()) + .setCertValidUntil(certValidUntil.get()) + .setDatanodeVersion(version.getVersion().toString()) + .setOpensearchRoles(opensearchRoles.get()) + .build(); + + nodeService.ping(dto); + + } + + private void registerServer() { + final boolean registrationSucceeded = nodeService.registerServer(DataNodeDto.Builder.builder() + .setId(nodeId.getNodeId()) + .setTransportAddress(opensearchBaseUri.get().toString()) + .setClusterAddress(opensearchClusterUri.get()) + .setHostname(configuration.getHostname()) + .setDataNodeStatus(DataNodeStatus.STARTING) + .setCertValidUntil(certValidUntil.get()) + .setDatanodeVersion(version.getVersion().toString()) + .setOpensearchRoles(opensearchRoles.get()) + .build()); + + if (!registrationSucceeded) { + LOG.error("Failed to register node {} for heartbeats.", nodeId.getNodeId()); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/periodicals/OpensearchNodeHeartbeat.java b/data-node/src/main/java/org/graylog/datanode/periodicals/OpensearchNodeHeartbeat.java new file mode 100644 index 000000000000..59816950b53f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/periodicals/OpensearchNodeHeartbeat.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.periodicals; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.shaded.opensearch2.org.opensearch.OpenSearchStatusException; +import org.graylog.shaded.opensearch2.org.opensearch.client.RequestOptions; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog.shaded.opensearch2.org.opensearch.client.core.MainResponse; +import org.graylog2.plugin.periodical.Periodical; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nonnull; +import java.io.IOException; +import java.util.Optional; + +@Singleton +public class OpensearchNodeHeartbeat extends Periodical { + + private static final Logger LOG = LoggerFactory.getLogger(OpensearchNodeHeartbeat.class); + private final OpensearchProcess process; + + @Inject + public OpensearchNodeHeartbeat(OpensearchProcess process) { + this.process = process; + } + + @Override + // This method is "synchronized" because we are also calling it directly in AutomaticLeaderElectionService + public synchronized void doRun() { + if (!process.isInState(OpensearchState.TERMINATED) && !process.isInState(OpensearchState.WAITING_FOR_CONFIGURATION) + && !process.isInState(OpensearchState.REMOVED)) { + + final Optional restClient = process.restClient(); + if (restClient.isPresent()) { + try { + final MainResponse health = restClient.get() + .info(RequestOptions.DEFAULT); + onNodeResponse(process, health); + } catch (IOException | OpenSearchStatusException e) { + onRestError(process, e); + } + } + } + } + + private void onNodeResponse(OpensearchProcess process, MainResponse nodeResponse) { + process.onEvent(OpensearchEvent.HEALTH_CHECK_OK); + } + + private void onRestError(OpensearchProcess process, Exception e) { + process.onEvent(OpensearchEvent.HEALTH_CHECK_FAILED); + LOG.warn("Opensearch REST api of process {} unavailable. Cause: {}", process.processInfo().process().pid(), e.getMessage()); + } + + @Nonnull + @Override + protected Logger getLogger() { + return LOG; + } + + @Override + public boolean runsForever() { + return false; + } + + @Override + public boolean stopOnGracefulShutdown() { + return false; + } + + @Override + public boolean leaderOnly() { + return false; + } + + @Override + public boolean startOnThisNode() { + return true; + } + + @Override + public boolean isDaemon() { + return true; + } + + @Override + public int getInitialDelaySeconds() { + return 0; + } + + @Override + public int getPeriodSeconds() { + return 10; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/CommandLineProcess.java b/data-node/src/main/java/org/graylog/datanode/process/CommandLineProcess.java new file mode 100644 index 000000000000..29a475dfbc4f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/CommandLineProcess.java @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import jakarta.validation.constraints.NotNull; +import org.apache.commons.exec.CommandLine; +import org.apache.commons.exec.DefaultExecutor; +import org.apache.commons.exec.ExecuteWatchdog; +import org.apache.commons.exec.PumpStreamHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; + + +public class CommandLineProcess { + private final Path executable; + private final List arguments; + private final ProcessListener listener; + private final Environment environment; + private final WatchdogWithProcessInfo watchDog; + + public CommandLineProcess(Path executable, + List arguments, + ProcessListener listener, + Environment environment) { + this.executable = executable; + this.arguments = arguments; + this.listener = listener; + this.environment = environment; + this.watchDog = new WatchdogWithProcessInfo(ExecuteWatchdog.INFINITE_TIMEOUT); + } + + private static final Logger LOG = LoggerFactory.getLogger(CommandLineProcess.class); + + public void start() { + LOG.info("Running process from " + executable.toAbsolutePath()); + + CommandLine cmdLine = new CommandLine(executable.toAbsolutePath().toString()); + arguments.forEach(it -> cmdLine.addArgument(it, true)); + + try { + createExecutor().execute(cmdLine, environment.getEnv(), listener); + listener.onStart(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private DefaultExecutor createExecutor() { + DefaultExecutor executor = new DefaultExecutor(); + executor.setStreamHandler(new PumpStreamHandler(new LoggingOutputStream(listener::onStdOut), new LoggingOutputStream(listener::onStdErr))); + executor.setWatchdog(watchDog); + return executor; + } + + public void stop() { + this.watchDog.destroyProcess(); + } + + @NotNull + public ProcessInformation processInfo() { + return watchDog.processInfo(); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/CommandLineProcessListener.java b/data-node/src/main/java/org/graylog/datanode/process/CommandLineProcessListener.java new file mode 100644 index 000000000000..19ca39175769 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/CommandLineProcessListener.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import org.apache.commons.exec.ExecuteException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This listener allows to be disabled from outside - if we manually stop the process, we want to stop all the incoming + * events as well, otherwise we'll receive a process termination after the stop, which will then confuse our watchdog + * and other parts of the datanode. 
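+ *
+ * A rough usage sketch (illustrative only; delegate, executable, arguments and environment are assumed to be
+ * provided by the caller and are not taken from this class):
+ * <pre>{@code
+ * CommandLineProcessListener listener = new CommandLineProcessListener(delegate);
+ * CommandLineProcess process = new CommandLineProcess(executable, arguments, listener, environment);
+ * process.start();
+ * // on a manual stop, mute the listener first so the resulting termination
+ * // callback doesn't reach the watchdog, then stop the OS-level process:
+ * listener.stopListening();
+ * process.stop();
+ * }</pre>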
+ */ +public class CommandLineProcessListener implements ProcessListener { + + private static final Logger LOG = LoggerFactory.getLogger(CommandLineProcessListener.class); + + private final ProcessListener delegate; + private boolean listening = true; + + public CommandLineProcessListener(ProcessListener delegate) { + this.delegate = delegate; + } + + public void stopListening() { + this.listening = false; + } + + + @Override + public void onProcessComplete(int exitValue) { + if(listening) { + delegate.onProcessComplete(exitValue); + } else { + LOG.info("Ignoring onProcessComplete({}) call, this process is already stopped", exitValue); + } + } + + @Override + public void onProcessFailed(ExecuteException e) { + if(listening) { + delegate.onProcessFailed(e); + } else { + LOG.info("Ignoring onProcessFailed({}) call, this process is already stopped", e.getMessage()); + } + } + + @Override + public void onStart() { + if(listening) { + delegate.onStart(); + } else { + LOG.info("Ignoring onStart() call, this process is already stopped"); + } + } + + @Override + public void onStdOut(String line) { + if(listening) { + delegate.onStdOut(line); + } + } + + @Override + public void onStdErr(String line) { + if(listening) { + delegate.onStdErr(line); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/Environment.java b/data-node/src/main/java/org/graylog/datanode/process/Environment.java new file mode 100644 index 000000000000..56738354a83e --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/Environment.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static java.util.function.Predicate.not; + +public class Environment { + + private static final String JAVA_HOME_ENV = "JAVA_HOME"; + + private final Map env; + private final Map additionalVariables = new HashMap<>(); + + public Environment(Map env) { + this.env = env; + } + + public Environment put(String key, String value) { + this.additionalVariables.put(key, value); + return this; + } + + public Map getEnv() { + Map env = new HashMap<>(); + env.putAll(cleanEnvironment(this.env)); + env.putAll(additionalVariables); + return Collections.unmodifiableMap(env); + } + + private Map cleanEnvironment(Map env) { + return env.entrySet().stream() + // Remove JAVA_HOME from environment because OpenSearch should use its bundled JVM. 
+ .filter(not(entry -> JAVA_HOME_ENV.equals(entry.getKey()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } +} + diff --git a/data-node/src/main/java/org/graylog/datanode/process/LoggingOutputStream.java b/data-node/src/main/java/org/graylog/datanode/process/LoggingOutputStream.java new file mode 100644 index 000000000000..d69970999fe9 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/LoggingOutputStream.java @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.function.Consumer; + +public class LoggingOutputStream extends OutputStream { + + private final StringBuilder builder = new StringBuilder(); + private final Consumer consumer; + + public LoggingOutputStream(Consumer consumer) { + this.consumer = consumer; + } + + @Override + public void write(int b) throws IOException { + if (b == '\n') { + consumeLine(builder.toString()); + builder.setLength(0); // reset the builder + } else { + builder.append((char) b); + } + } + + private void consumeLine(String line) { + consumer.accept(line); + } + + @Override + public void flush() throws IOException { + super.flush(); + if(!builder.isEmpty()) { + consumeLine(builder.toString()); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/ManagableProcess.java b/data-node/src/main/java/org/graylog/datanode/process/ManagableProcess.java new file mode 100644 index 000000000000..5316dee1ce54 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/ManagableProcess.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.process; + +import com.github.oxo42.stateless4j.delegates.Trace; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracer; + +public interface ManagableProcess { + + void configure(T configuration); + + void start(); + + void stop(); + + void onEvent(EVENT event); + + boolean isInState(STATE state); + +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/ProcessInformation.java b/data-node/src/main/java/org/graylog/datanode/process/ProcessInformation.java new file mode 100644 index 000000000000..ebebda1b827b --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/ProcessInformation.java @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import java.time.Instant; + +public record ProcessInformation(long pid, boolean alive, Instant started) { + + public static ProcessInformation empty() { + return new ProcessInformation(-1, false, null); + } + + public static ProcessInformation create(Process p) { + return new ProcessInformation(p.pid(), p.isAlive(), p.info().startInstant().orElse(null)); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/ProcessListener.java b/data-node/src/main/java/org/graylog/datanode/process/ProcessListener.java new file mode 100644 index 000000000000..b2631a975238 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/ProcessListener.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import org.apache.commons.exec.ExecuteResultHandler; + +public interface ProcessListener extends ExecuteResultHandler { + void onStart(); + void onStdOut(String line); + void onStdErr(String line); +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/WatchdogWithProcessInfo.java b/data-node/src/main/java/org/graylog/datanode/process/WatchdogWithProcessInfo.java new file mode 100644 index 000000000000..3b112c530106 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/WatchdogWithProcessInfo.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import org.apache.commons.exec.ExecuteWatchdog; + +import jakarta.validation.constraints.NotNull; + +import java.util.Optional; + +public class WatchdogWithProcessInfo extends ExecuteWatchdog { + + private Process process; + + public WatchdogWithProcessInfo(long timeout) { + super(timeout); + } + + @Override + public synchronized void start(Process processToMonitor) { + super.start(processToMonitor); + this.process = processToMonitor; + } + + @NotNull + public ProcessInformation processInfo() { + return Optional.ofNullable(process) + .map(ProcessInformation::create) + .orElse(ProcessInformation.empty()); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/ConfigurationBuildParams.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/ConfigurationBuildParams.java new file mode 100644 index 000000000000..70cfb938cf96 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/ConfigurationBuildParams.java @@ -0,0 +1,20 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process.configuration.beans; + +public interface ConfigurationBuildParams { +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/DatanodeConfigurationBean.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/DatanodeConfigurationBean.java new file mode 100644 index 000000000000..c1a0a26ffe6e --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/DatanodeConfigurationBean.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.process.configuration.beans; + + +public interface DatanodeConfigurationBean { + DatanodeConfigurationPart buildConfigurationPart(T trustedCertificates); +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/DatanodeConfigurationPart.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/DatanodeConfigurationPart.java new file mode 100644 index 000000000000..66b8b9c8f200 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/beans/DatanodeConfigurationPart.java @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process.configuration.beans; + +import com.google.auto.value.AutoValue; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import jakarta.annotation.Nullable; +import org.graylog.datanode.process.configuration.files.DatanodeConfigFile; +import org.graylog.security.certutil.csr.KeystoreInformation; + +import java.security.KeyStore; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +@AutoValue +public abstract class DatanodeConfigurationPart { + public abstract List nodeRoles(); + + public abstract Map keystoreItems(); + + public abstract Map properties(); + + public abstract List javaOpts(); + + public abstract Map systemProperties(); + + @Nullable + public abstract KeystoreInformation httpCertificate(); + + @Nullable + public abstract KeystoreInformation transportCertificate(); + + public abstract boolean securityConfigured(); + + @Nullable + public abstract KeyStore trustStore(); + + public abstract List configFiles(); + + public static Builder builder() { + return new AutoValue_DatanodeConfigurationPart.Builder() + .nodeRoles(Collections.emptyList()) + .keystoreItems(Collections.emptyMap()) + .properties(Collections.emptyMap()) + .javaOpts(Collections.emptyList()) + .configFiles(Collections.emptyList()) + .securityConfigured(false) + .trustStore(null) + .systemProperties(Collections.emptyMap()); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder nodeRoles(List nodeRoles); + + abstract ImmutableList.Builder nodeRolesBuilder(); + + public final Builder addNodeRole(String role) { + nodeRolesBuilder().add(role); + return this; + } + + public abstract Builder javaOpts(List javaOpts); + + abstract ImmutableList.Builder javaOptsBuilder(); + + public final Builder javaOpt(String opt) { + javaOptsBuilder().add(opt); + return this; + } + + public abstract Builder configFiles(List configFiles); + + abstract ImmutableList.Builder configFilesBuilder(); + + public Builder withConfigFile(DatanodeConfigFile configFile) { + configFilesBuilder().add(configFile); + return this; + } + + public abstract Builder keystoreItems(Map keystoreItems); + + public abstract Builder properties(Map properties); + + public abstract Builder httpCertificate(KeystoreInformation 
httpCertificate); + + public abstract Builder transportCertificate(KeystoreInformation httpCertificate); + + @Deprecated + public abstract Builder securityConfigured(boolean securityConfigured); + + public abstract Builder trustStore(@Nullable KeyStore truststore); + + + private final ImmutableMap.Builder systemPropertiesBuilder = ImmutableMap.builder(); + + ImmutableMap.Builder systemPropertiesBuilder() { + return systemPropertiesBuilder; + } + + abstract Builder systemProperties(Map systemProperties); // not public + + abstract DatanodeConfigurationPart autoBuild(); // not public + + public DatanodeConfigurationPart build() { + systemProperties(systemPropertiesBuilder.buildKeepingLast()); + return autoBuild(); + } + + public Builder systemProperty(String key, String value) { + systemPropertiesBuilder().put(key, value); + return this; + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/files/DatanodeConfigFile.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/DatanodeConfigFile.java new file mode 100644 index 000000000000..8137daee5755 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/DatanodeConfigFile.java @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process.configuration.files; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Path; + +public interface DatanodeConfigFile { + + /** + * Target relative path of the configuration file. May include parent directories. + */ + Path relativePath(); + + /** + * Given a file stream, write the configuration file content in it. Everything will be automatically flushed and closed. + */ + void write(OutputStream stream) throws IOException; +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/files/InputStreamConfigFile.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/InputStreamConfigFile.java new file mode 100644 index 000000000000..f22d25283ba5 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/InputStreamConfigFile.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.process.configuration.files; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Path; + +public record InputStreamConfigFile(Path relativePath, ByteArrayInputStream inputStream) implements DatanodeConfigFile { + @Override + public void write(OutputStream output) throws IOException { + inputStream.transferTo(output); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/files/KeystoreConfigFile.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/KeystoreConfigFile.java new file mode 100644 index 000000000000..c4f87c0c05c7 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/KeystoreConfigFile.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process.configuration.files; + +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.security.certutil.csr.KeystoreInformation; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Path; + +public record KeystoreConfigFile(Path relativePath, + KeystoreInformation keystoreInformation) implements DatanodeConfigFile { + + @Override + public void write(OutputStream stream) throws IOException { + try { + if (keystoreInformation.password().length == 0) { + throw new IllegalArgumentException("Keystore password is empty!"); + } + keystoreInformation().loadKeystore().store(stream, keystoreInformation.password()); + } catch (Exception e) { + throw new OpensearchConfigurationException("Failed to persist opensearch keystore file " + relativePath, e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/files/OpensearchSecurityConfigurationFile.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/OpensearchSecurityConfigurationFile.java new file mode 100644 index 000000000000..d49786fdaaa3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/OpensearchSecurityConfigurationFile.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.process.configuration.files; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import org.graylog2.security.JwtSecret; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +public class OpensearchSecurityConfigurationFile implements DatanodeConfigFile { + + private static final ObjectMapper OBJECT_MAPPER = new YAMLMapper(); + private static final Path TARGET_PATH = Path.of("opensearch-security", "config.yml"); + private final JwtSecret signingKey; + + public OpensearchSecurityConfigurationFile(final JwtSecret signingKey) { + this.signingKey = signingKey; + } + + @Override + public Path relativePath() { + return TARGET_PATH; + } + + @Override + public void write(OutputStream stream) throws IOException { + final InputStream configSource = getClass().getResourceAsStream("/opensearch/config/opensearch-security/config.yml"); + Map contents = OBJECT_MAPPER.readValue(configSource, new TypeReference<>() {}); + Map config = filterConfigurationMap(contents, "config", "dynamic", "authc", "jwt_auth_domain", "http_authenticator", "config"); + config.put("signing_key", signingKey.getBase64Encoded()); + OBJECT_MAPPER.writeValue(stream, contents); + } + + + private Map filterConfigurationMap(final Map map, final String... keys) { + Map result = map; + for (final String key : List.of(keys)) { + result = (Map) result.get(key); + } + return result; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/files/TextConfigFile.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/TextConfigFile.java new file mode 100644 index 000000000000..21c356a5151f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/TextConfigFile.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
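Reviewer side note, not part of this changeset: the varargs passed to filterConfigurationMap(...) above describe a key path into opensearch-security/config.yml. A statement-level sketch of the same traversal follows; the inline YAML and the placeholder secret are made up, the imports are the same Jackson types used by the class above, and the snippet is assumed to run inside a method that declares IOException.
ObjectMapper yamlMapper = new YAMLMapper();
Map<String, Object> contents = yamlMapper.readValue("""
        config:
          dynamic:
            authc:
              jwt_auth_domain:
                http_authenticator:
                  config: {}
        """, new TypeReference<>() {});
Map<String, Object> cfg = contents;
// walk config -> dynamic -> authc -> jwt_auth_domain -> http_authenticator -> config
for (String key : List.of("config", "dynamic", "authc", "jwt_auth_domain", "http_authenticator", "config")) {
    cfg = (Map<String, Object>) cfg.get(key);
}
cfg.put("signing_key", "base64-encoded-jwt-secret-placeholder");
yamlMapper.writeValue(System.out, contents); // same shape the class above streams into its OutputStream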
+ */ +package org.graylog.datanode.process.configuration.files; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; + +public record TextConfigFile(Path relativePath, String text) implements DatanodeConfigFile { + + @Override + public void write(OutputStream output) throws IOException { + try (final ByteArrayInputStream input = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8))) { + input.transferTo(output); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/process/configuration/files/YamlConfigFile.java b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/YamlConfigFile.java new file mode 100644 index 000000000000..65a94dc21bfc --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/process/configuration/files/YamlConfigFile.java @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process.configuration.files; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Path; +import java.util.Map; + +public record YamlConfigFile(Path relativePath, Map config) implements DatanodeConfigFile { + private static final ObjectMapper MAPPER = new ObjectMapper(new YAMLFactory()); + + @Override + public void write(OutputStream output) throws IOException { + MAPPER.writeValue(output, config); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/CertificatesController.java b/data-node/src/main/java/org/graylog/datanode/rest/CertificatesController.java new file mode 100644 index 000000000000..6228024f3024 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/CertificatesController.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.rest; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.DatanodeKeystoreException; +import org.graylog.datanode.configuration.OpensearchKeystoreProvider; +import org.graylog.security.certutil.KeyStoreDto; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.util.HashMap; +import java.util.Map; + +@Path("/certificates") +@Produces(MediaType.APPLICATION_JSON) +public class CertificatesController { + + private static final Logger log = LoggerFactory.getLogger(CertificatesController.class); + + private final DatanodeKeystore datanodeKeystore; + private final Map opensearchKeystore; + + @Inject + public CertificatesController(DatanodeKeystore keystore, Map opensearchKeystore) { + this.datanodeKeystore = keystore; + this.opensearchKeystore = opensearchKeystore; + } + + @GET + public Map getCertificates() { + Map certificates = new HashMap<>(); + try { + KeyStore keystore = datanodeKeystore.loadKeystore(); + certificates.put(OpensearchKeystoreProvider.Store.CONFIGURED, KeyStoreDto.fromKeyStore(keystore)); + } catch (DatanodeKeystoreException | KeyStoreException e) { + log.error("Could not load datanode keystore", e); + } + certificates.putAll(opensearchKeystore); + + return certificates; + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/CompatibilityResult.java b/data-node/src/main/java/org/graylog/datanode/rest/CompatibilityResult.java new file mode 100644 index 000000000000..fb86465782a8 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/CompatibilityResult.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import org.graylog.datanode.filesystem.index.dto.IndexerDirectoryInformation; + +public record CompatibilityResult(String hostname, String opensearchVersion, + IndexerDirectoryInformation info, + java.util.List compatibilityErrors, + java.util.List compatibilityWarnings) { +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/DataNodeStatus.java b/data-node/src/main/java/org/graylog/datanode/rest/DataNodeStatus.java new file mode 100644 index 000000000000..c3353ed8f0ee --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/DataNodeStatus.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog2.plugin.Version; + +public record DataNodeStatus(@JsonIgnore Version appVersion, SystemInfo operatingSystem, StatusResponse opensearch, + DatanodeDirectories datanodeDirectories) { + + DataNodeStatus(Version appVersion, StatusResponse opensearch, DatanodeDirectories datanodeDirectories) { + this(appVersion, new SystemInfo(), opensearch, datanodeDirectories); + } + + @JsonProperty + public String dataNodeVersion() { + return this.appVersion.toString(); + } + + record SystemInfo(String osName, String osVersion, String javaVersion, String userName) { + public SystemInfo() { + this(System.getProperty("os.name"), System.getProperty("os.version"), System.getProperty("java.version"), System.getProperty("user.name")); + } + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/IndexStateController.java b/data-node/src/main/java/org/graylog/datanode/rest/IndexStateController.java new file mode 100644 index 000000000000..9ccb4dc2957f --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/IndexStateController.java @@ -0,0 +1,121 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.rest; + +import jakarta.inject.Inject; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import okhttp3.Credentials; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.RequestBody; +import org.graylog.datanode.configuration.DatanodeTrustManagerProvider; +import org.graylog.storage.opensearch2.IndexState; +import org.graylog.storage.opensearch2.IndexStateChangeRequest; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.util.Locale; +import java.util.Objects; + +import static org.graylog.datanode.rest.OpensearchConnectionCheckController.CONNECT_TIMEOUT; +import static org.graylog.datanode.rest.OpensearchConnectionCheckController.READ_TIMEOUT; +import static org.graylog.datanode.rest.OpensearchConnectionCheckController.WRITE_TIMEOUT; + +@Path("/index-state") +@Produces(MediaType.APPLICATION_JSON) +public class IndexStateController { + + private final DatanodeTrustManagerProvider datanodeTrustManagerProvider; + private final OkHttpClient httpClient; + + @Inject + public IndexStateController(DatanodeTrustManagerProvider datanodeTrustManagerProvider) { + this.datanodeTrustManagerProvider = datanodeTrustManagerProvider; + this.httpClient = new OkHttpClient.Builder() + .retryOnConnectionFailure(true) + .connectTimeout(CONNECT_TIMEOUT) + .writeTimeout(WRITE_TIMEOUT) + .readTimeout(READ_TIMEOUT) + .build(); + } + + @POST + @Path("/get") + public IndexState get(IndexStateChangeRequest indexStateChangeRequest) { + final String host = indexStateChangeRequest.host().endsWith("/") ? indexStateChangeRequest.host() : indexStateChangeRequest.host() + "/"; + final Request.Builder request = new Request.Builder() + .url(host + "_cat/indices/" + indexStateChangeRequest.indexName() + "/?h=status"); + if (Objects.nonNull(indexStateChangeRequest.username()) && Objects.nonNull(indexStateChangeRequest.password())) { + request.header("Authorization", Credentials.basic(indexStateChangeRequest.username(), indexStateChangeRequest.password())); + } + try (var response = getClient().newCall(request.build()).execute()) { + if (response.isSuccessful() && response.body() != null) { + final String state = response.body().string().trim().toUpperCase(Locale.ROOT); + return IndexState.valueOf(state); + } else { + throw new RuntimeException("Failed to detect open/close index status " + indexStateChangeRequest.indexName() + ". Code: " + response.code() + "; message=" + response.message()); + } + } catch (IOException e) { + throw new RuntimeException("Failed to open/close index" + indexStateChangeRequest.indexName(), e); + } + } + + @POST + @Path("/set") + public IndexState change(IndexStateChangeRequest indexStateChangeRequest) { + return performAction(indexStateChangeRequest); + } + + private IndexState performAction(IndexStateChangeRequest indexStateChangeRequest) { + final String host = indexStateChangeRequest.host().endsWith("/") ? indexStateChangeRequest.host() : indexStateChangeRequest.host() + "/"; + final Request.Builder request = new Request.Builder() + .post(RequestBody.create("", okhttp3.MediaType.parse(MediaType.APPLICATION_JSON))) + .url(host + indexStateChangeRequest.indexName() + "/" + (indexStateChangeRequest.action() == IndexState.OPEN ? 
"_open" : "_close")); + if (Objects.nonNull(indexStateChangeRequest.username()) && Objects.nonNull(indexStateChangeRequest.password())) { + request.header("Authorization", Credentials.basic(indexStateChangeRequest.username(), indexStateChangeRequest.password())); + } + try (var response = getClient().newCall(request.build()).execute()) { + if (response.isSuccessful()) { + return indexStateChangeRequest.action(); + } else { + throw new RuntimeException("Failed to open/close index " + indexStateChangeRequest.indexName() + ". Code: " + response.code() + "; message=" + response.message()); + } + } catch (IOException e) { + throw new RuntimeException("Failed to open/close index" + indexStateChangeRequest.indexName(), e); + } + } + + private OkHttpClient getClient() { + try { + final SSLContext ctx = SSLContext.getInstance("TLS"); + final X509TrustManager trustManager = datanodeTrustManagerProvider.get(); + ctx.init(null, new TrustManager[]{trustManager}, new SecureRandom()); + return httpClient.newBuilder().sslSocketFactory(ctx.getSocketFactory(), trustManager).build(); + } catch (NoSuchAlgorithmException | KeyManagementException e) { + throw new RuntimeException(e); + + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/IndicesDirectoryController.java b/data-node/src/main/java/org/graylog/datanode/rest/IndicesDirectoryController.java new file mode 100644 index 000000000000..c9c758c4ae71 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/IndicesDirectoryController.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.rest; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.DirectoryReadableValidator; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.filesystem.index.IndicesDirectoryParser; +import org.graylog.datanode.filesystem.index.dto.IndexerDirectoryInformation; +import org.graylog.datanode.filesystem.index.dto.NodeInformation; +import org.graylog.shaded.opensearch2.org.opensearch.Version; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +@Path("/indices-directory") +@Produces(MediaType.APPLICATION_JSON) +public class IndicesDirectoryController { + + private final Configuration configuration; + private final DatanodeConfiguration datanodeConfiguration; + private final IndicesDirectoryParser indicesDirectoryParser; + private final DirectoryReadableValidator directoryReadableValidator = new DirectoryReadableValidator(); + + @Inject + public IndicesDirectoryController(Configuration configuration, DatanodeConfiguration datanodeConfiguration, IndicesDirectoryParser indicesDirectoryParser) { + this.configuration = configuration; + this.datanodeConfiguration = datanodeConfiguration; + this.indicesDirectoryParser = indicesDirectoryParser; + } + + @GET + @Path("compatibility") + public CompatibilityResult status() { + final java.nio.file.Path dataTargetDir = datanodeConfiguration.datanodeDirectories().getDataTargetDir(); + final String opensearchVersion = datanodeConfiguration.opensearchDistributionProvider().get().version(); + final String hostname = configuration.getHostname(); + try { + directoryReadableValidator.validate(dataTargetDir.toUri().toString(), dataTargetDir); + final IndexerDirectoryInformation info = indicesDirectoryParser.parse(dataTargetDir); + final Version currentVersion = Version.fromString(opensearchVersion); + + final List compatibilityWarnings = new ArrayList<>(); + + if (info.nodes().isEmpty() || info.nodes().stream().allMatch(n -> n.indices().isEmpty())) { + compatibilityWarnings.add("Your configured opensearch_data_location directory " + dataTargetDir.toAbsolutePath() + " doesn't contain any indices! 
Do you want to continue without migrating existing data?"); + } + + final List compatibilityErrors = info.nodes().stream() + .filter(node -> !isNodeCompatible(node, currentVersion)) + .map(node -> String.format(Locale.ROOT, "Current version %s of Opensearch is not compatible with index version %s", currentVersion, node.nodeVersion())) + .toList(); + + return new CompatibilityResult(hostname, opensearchVersion, info, compatibilityErrors, compatibilityWarnings); + } catch (Exception e) { + return new CompatibilityResult(hostname, opensearchVersion, new IndexerDirectoryInformation(dataTargetDir, Collections.emptyList()), Collections.singletonList(e.getMessage()), Collections.emptyList()); + } + } + + private static boolean isNodeCompatible(NodeInformation node, Version currentVersion) { + final Version nodeVersion = Version.fromString(node.nodeVersion()); + return node.nodeVersion() == null || currentVersion.isCompatible(nodeVersion); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/LogsController.java b/data-node/src/main/java/org/graylog/datanode/rest/LogsController.java new file mode 100644 index 000000000000..c1d2bf284efc --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/LogsController.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.graylog.datanode.opensearch.OpensearchProcess; + +import jakarta.inject.Inject; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import org.graylog.datanode.rest.config.OnlyInSecuredNode; +import org.graylog2.log4j.MemoryAppender; + +import java.util.List; + +@Path("/logs") +@Produces(MediaType.APPLICATION_JSON) +public class LogsController { + + private static final String MEMORY_APPENDER_NAME = "datanode-internal-logs"; + + private final OpensearchProcess managedOpensearch; + + @Inject + public LogsController(OpensearchProcess managedOpenSearch) { + this.managedOpensearch = managedOpenSearch; + } + + @GET + @Path("/stdout") + public List getOpensearchStdout() { + return managedOpensearch.stdOutLogs(); + } + + @GET + @Path("/stderr") + public List getOpensearchStderr() { + return managedOpensearch.stdErrLogs(); + } + + @GET + @OnlyInSecuredNode + @Produces(MediaType.TEXT_PLAIN) + @Path("/internal") + public Response getOpensearchInternal() { + final Appender appender = getAppender(MEMORY_APPENDER_NAME); + if (appender == null) { + throw new NotFoundException("Memory appender is disabled. 
Please refer to the example log4j.xml file."); + } + + if (!(appender instanceof MemoryAppender memoryAppender)) { + throw new InternalServerErrorException("Memory appender is not an instance of MemoryAppender. Please refer to the example log4j.xml file."); + } + var mediaType = MediaType.valueOf(MediaType.TEXT_PLAIN); + + StreamingOutput streamingOutput = outputStream -> memoryAppender.streamFormattedLogMessages(outputStream, 0); + Response.ResponseBuilder response = Response.ok(streamingOutput, mediaType); + + return response.build(); + } + + + private Appender getAppender(final String appenderName) { + final LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + final Configuration configuration = loggerContext.getConfiguration(); + return configuration.getAppender(appenderName); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/ManagementController.java b/data-node/src/main/java/org/graylog/datanode/rest/ManagementController.java new file mode 100644 index 000000000000..e52a960e82db --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/ManagementController.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import org.graylog.datanode.rest.config.OnlyInSecuredNode; +import org.graylog2.datanode.DataNodeLifecycleEvent; +import org.graylog2.datanode.DataNodeLifecycleTrigger; +import org.graylog2.events.ClusterEventBus; +import org.graylog2.plugin.system.NodeId; + +import jakarta.inject.Inject; + +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +@Path("/management") +@Produces(MediaType.APPLICATION_JSON) +public class ManagementController { + + private final ClusterEventBus clusterEventBus; + private final NodeId nodeId; + + + @Inject + public ManagementController(ClusterEventBus clusterEventBus, NodeId nodeId) { + this.clusterEventBus = clusterEventBus; + this.nodeId = nodeId; + } + + @DELETE + @OnlyInSecuredNode + public void remove() { + postEvent(DataNodeLifecycleTrigger.REMOVE); + } + + @POST + @Path("/start") + @OnlyInSecuredNode + public void start() { + postEvent(DataNodeLifecycleTrigger.START); + } + + @POST + @Path("/stop") + @OnlyInSecuredNode + public void stop() { + postEvent(DataNodeLifecycleTrigger.STOP); + } + + private void postEvent(DataNodeLifecycleTrigger trigger) { + DataNodeLifecycleEvent e = DataNodeLifecycleEvent.create(nodeId.getNodeId(), trigger); + clusterEventBus.post(e); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/OpensearchConnectionCheckController.java b/data-node/src/main/java/org/graylog/datanode/rest/OpensearchConnectionCheckController.java new file mode 100644 index 000000000000..5a11826a1505 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/OpensearchConnectionCheckController.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import jakarta.annotation.Nonnull; +import jakarta.inject.Inject; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import okhttp3.Credentials; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import org.graylog.datanode.configuration.DatanodeTrustManagerProvider; +import org.graylog.storage.opensearch2.ConnectionCheckIndex; +import org.graylog.storage.opensearch2.ConnectionCheckRequest; +import org.graylog.storage.opensearch2.ConnectionCheckResponse; +import org.graylog2.security.TrustAllX509TrustManager; +import org.graylog2.security.untrusted.UntrustedCertificateExtractor; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.StringReader; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Comparator; +import java.util.LinkedList; +import java.util.List; +import java.util.Locale; + +@Path("/connection-check") +@Produces(MediaType.APPLICATION_JSON) +public class OpensearchConnectionCheckController { + + public static final Duration CONNECT_TIMEOUT = Duration.ofSeconds(10); + public static final Duration WRITE_TIMEOUT = Duration.ofSeconds(10); + public static final Duration READ_TIMEOUT = Duration.ofSeconds(10); + private final DatanodeTrustManagerProvider datanodeTrustManagerProvider; + + private final OkHttpClient httpClient; + + @Inject + public OpensearchConnectionCheckController(DatanodeTrustManagerProvider datanodeTrustManagerProvider) { + this.datanodeTrustManagerProvider = datanodeTrustManagerProvider; + this.httpClient = new OkHttpClient.Builder() + .retryOnConnectionFailure(true) + .connectTimeout(CONNECT_TIMEOUT) + .writeTimeout(WRITE_TIMEOUT) + .readTimeout(READ_TIMEOUT) + .build(); + } + + @POST + @Path("opensearch") + public ConnectionCheckResponse status(ConnectionCheckRequest request) { + final List unknownCertificates = new LinkedList<>(); + try { + unknownCertificates.addAll(extractUnknownCertificates(request.host())); + final List indices = getAllIndicesFrom(request.host(), request.username(), request.password(), request.trustUnknownCerts()); + return ConnectionCheckResponse.success(indices, unknownCertificates); + } catch (Exception e) { + return ConnectionCheckResponse.error(e, unknownCertificates); + } + } + + List getAllIndicesFrom(final String host, final String username, final String password, boolean trustUnknownCerts) { + var url = (host.endsWith("/") ? 
host : host + "/") + "_cat/indices?h=index,status"; + try (var response = getClient(trustUnknownCerts).newCall(new Request.Builder().url(url).header("Authorization", Credentials.basic(username, password)).build()).execute()) { + if (response.isSuccessful() && response.body() != null) { + // filtering all indices that start with "." as they indicate a system index - we don't want to reindex those + return new BufferedReader(new StringReader(response.body().string())) + .lines() + .filter(i -> !i.startsWith(".")) + .map(this::parseIndexLine) + .sorted(Comparator.comparing(ConnectionCheckIndex::name, Comparator.naturalOrder())) + .toList(); + } else { + String message = String.format(Locale.ROOT, "Could not read list of indices from %s. Code=%d, message=%s", host, response.code(), response.message()); + throw new RuntimeException(message); + } + } catch (IOException e) { + throw new RuntimeException("Could not read list of indices from " + host + ", " + e.getMessage(), e); + } + } + + private ConnectionCheckIndex parseIndexLine(String line) { + final String[] parts = line.split("\\s+"); + return new ConnectionCheckIndex(parts[0], parts[1].contains("close")); + } + + + private OkHttpClient getClient(boolean trustUnknownCerts) { + try { + final SSLContext ctx = SSLContext.getInstance("TLS"); + final X509TrustManager trustManager = getTrustManager(trustUnknownCerts); + ctx.init(null, new TrustManager[]{trustManager}, new SecureRandom()); + return httpClient.newBuilder().sslSocketFactory(ctx.getSocketFactory(), trustManager).build(); + } catch (NoSuchAlgorithmException | KeyManagementException e) { + throw new RuntimeException(e); + + } + } + + @Nonnull + private X509TrustManager getTrustManager(boolean trustUnknownCerts) { + if (trustUnknownCerts) { + return new TrustAllX509TrustManager(); + } else { + return datanodeTrustManagerProvider.get(); + } + } + + @Nonnull + private List extractUnknownCertificates(String host) { + final UntrustedCertificateExtractor extractor = new UntrustedCertificateExtractor(httpClient); + try { + return extractor.extractUntrustedCerts(host); + } catch (NoSuchAlgorithmException | IOException | KeyManagementException e) { + throw new RuntimeException(e); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/OpensearchLockCheckController.java b/data-node/src/main/java/org/graylog/datanode/rest/OpensearchLockCheckController.java new file mode 100644 index 000000000000..0108378216ed --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/OpensearchLockCheckController.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.rest; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.plugins.views.storage.migration.state.actions.OpensearchLockCheckResult; +import org.graylog.plugins.views.storage.migration.state.actions.OpensearchNodeLock; +import org.graylog2.bootstrap.preflight.PreflightCheckException; + +import java.io.IOException; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; +import java.nio.channels.NonWritableChannelException; +import java.nio.channels.OverlappingFileLockException; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; +import java.util.Collections; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +@Path("/lock-check") +@Produces(MediaType.APPLICATION_JSON) +public class OpensearchLockCheckController { + + private final java.nio.file.Path dataTargetDir; + + @Inject + public OpensearchLockCheckController(DatanodeConfiguration datanodeConfiguration) { + this(datanodeConfiguration.datanodeDirectories().getDataTargetDir()); + } + + public OpensearchLockCheckController(java.nio.file.Path dataTargetDir) { + this.dataTargetDir = dataTargetDir; + } + + @GET + public OpensearchLockCheckResult checkLockFiles() { + final java.nio.file.Path nodesDir = dataTargetDir.resolve("nodes"); + if (Files.isDirectory(nodesDir)) { + try (final Stream nodes = Files.list(nodesDir)) { + return nodes.map(n -> new OpensearchNodeLock(n, isDirLocked(n))) + .collect(Collectors.collectingAndThen(Collectors.toList(), OpensearchLockCheckResult::new)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } else { + return new OpensearchLockCheckResult(Collections.emptyList()); + } + } + + private static boolean isDirLocked(java.nio.file.Path nodeDir) { + final java.nio.file.Path lockFile = nodeDir.resolve("node.lock"); + if (Files.exists(lockFile)) { + try (FileChannel channel = FileChannel.open(lockFile, StandardOpenOption.WRITE)) { + final FileLock fileLock = channel.tryLock(); + if (fileLock != null) { // file was not locked, we are good to go, let's release immediately + fileLock.release(); + return false; + } else { + return true; + } + } catch (OverlappingFileLockException e) { + return true; + } catch (NonWritableChannelException | IOException e) { + throw new PreflightCheckException("Failed to verify free node.lock file", e); + } + } + return false; + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/RestBindings.java b/data-node/src/main/java/org/graylog/datanode/rest/RestBindings.java new file mode 100644 index 000000000000..921b498a3a0a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/RestBindings.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.rest; + +import org.graylog2.plugin.inject.Graylog2Module; + +public class RestBindings extends Graylog2Module { + @Override + protected void configure() { + addSystemRestResource(StatusController.class); + addSystemRestResource(LogsController.class); + addSystemRestResource(ManagementController.class); + addSystemRestResource(IndicesDirectoryController.class); + addSystemRestResource(OpensearchConnectionCheckController.class); + addSystemRestResource(IndexStateController.class); + addSystemRestResource(CertificatesController.class); + addSystemRestResource(OpensearchLockCheckController.class); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/StatusController.java b/data-node/src/main/java/org/graylog/datanode/rest/StatusController.java new file mode 100644 index 000000000000..0dea32a4d610 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/StatusController.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog2.plugin.Version; + +import jakarta.inject.Inject; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +@Path("/") +@Produces(MediaType.APPLICATION_JSON) +public class StatusController { + + private final Version version = Version.CURRENT_CLASSPATH; + + private final DatanodeConfiguration datanodeConfiguration; + private final OpensearchProcess openSearch; + + @Inject + public StatusController(DatanodeConfiguration datanodeConfiguration, OpensearchProcess openSearch) { + this.datanodeConfiguration = datanodeConfiguration; + this.openSearch = openSearch; + } + + @GET + public DataNodeStatus status() { + return new DataNodeStatus( + version, + new StatusResponse(datanodeConfiguration.opensearchDistributionProvider().get().version(),openSearch.processInfo()), + datanodeConfiguration.datanodeDirectories() + ); + } + +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/StatusResponse.java b/data-node/src/main/java/org/graylog/datanode/rest/StatusResponse.java new file mode 100644 index 000000000000..fb85bb3936f3 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/StatusResponse.java @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. 
If not, see + * . + */ +package org.graylog.datanode.rest; + +import org.graylog.datanode.opensearch.OpensearchInfo; + +public record StatusResponse(String opensearchVersion, OpensearchInfo node) {} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/config/OnlyInSecuredNode.java b/data-node/src/main/java/org/graylog/datanode/rest/config/OnlyInSecuredNode.java new file mode 100644 index 000000000000..18e2939a9c0a --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/config/OnlyInSecuredNode.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest.config; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface OnlyInSecuredNode { +} diff --git a/data-node/src/main/java/org/graylog/datanode/rest/config/SecuredNodeAnnotationFilter.java b/data-node/src/main/java/org/graylog/datanode/rest/config/SecuredNodeAnnotationFilter.java new file mode 100644 index 000000000000..dfd566edb179 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/rest/config/SecuredNodeAnnotationFilter.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.rest.config; + +import org.glassfish.jersey.server.ContainerRequest; +import org.glassfish.jersey.server.model.ResourceMethod; + +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerRequestFilter; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; + +import java.io.IOException; +import java.util.Objects; + +public class SecuredNodeAnnotationFilter implements ContainerRequestFilter { + + private final boolean isInsecureNode; + + public SecuredNodeAnnotationFilter(boolean isInsecureNode) { + this.isInsecureNode = isInsecureNode; + } + + @Override + public void filter(ContainerRequestContext requestContext) throws IOException { + if (requestContext instanceof ContainerRequest request) { + + final ResourceMethod method = request.getUriInfo().getMatchedResourceMethod(); + final OnlyInSecuredNode annotation = method.getInvocable() + .getHandlingMethod().getAnnotation(OnlyInSecuredNode.class); + + if (Objects.nonNull(annotation) && isInsecureNode) { + requestContext.abortWith(Response.status(Response.Status.UNAUTHORIZED) + .entity("This resource can only be accessed in secured data nodes.") + .type(MediaType.TEXT_PLAIN_TYPE) + .build()); + } + + } else { + requestContext.abortWith(Response.status(Response.Status.INTERNAL_SERVER_ERROR) + .entity("Server is no Jetty Server.") + .type(MediaType.TEXT_PLAIN_TYPE) + .build()); + } + + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/shared/system/activities/DataNodeActivityWriter.java b/data-node/src/main/java/org/graylog/datanode/shared/system/activities/DataNodeActivityWriter.java new file mode 100644 index 000000000000..95434d44bbf5 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/shared/system/activities/DataNodeActivityWriter.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.shared.system.activities; + +import org.graylog2.shared.system.activities.Activity; +import org.graylog2.shared.system.activities.ActivityWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DataNodeActivityWriter implements ActivityWriter { + private static final Logger LOG = LoggerFactory.getLogger(DataNodeActivityWriter.class); + + @Override + public void write(Activity activity) { + LOG.debug("Activity: {}", activity); + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdown.java b/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdown.java new file mode 100644 index 000000000000..eb18fa28b16c --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdown.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.shutdown; + +import org.graylog.datanode.Configuration; +import org.graylog.datanode.initializers.JerseyService; +import org.graylog.datanode.initializers.PeriodicalsService; +import org.graylog2.shared.system.activities.Activity; +import org.graylog2.shared.system.activities.ActivityWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + + +@Singleton +public class GracefulShutdown implements Runnable { + private static final Logger LOG = LoggerFactory.getLogger(GracefulShutdown.class); + + private final ActivityWriter activityWriter; + private final PeriodicalsService periodicalsService; + private final JerseyService jerseyService; + private final GracefulShutdownService gracefulShutdownService; + + @Inject + public GracefulShutdown(ActivityWriter activityWriter, + Configuration configuration, + PeriodicalsService periodicalsService, + JerseyService jerseyService, + GracefulShutdownService gracefulShutdownService) { + this.activityWriter = activityWriter; + this.periodicalsService = periodicalsService; + this.jerseyService = jerseyService; + this.gracefulShutdownService = gracefulShutdownService; + } + + @Override + public void run() { + doRun(true); + } + + public void runWithoutExit() { + doRun(false); + } + + private void doRun(boolean exit) { + LOG.info("Graceful shutdown initiated."); + + activityWriter.write(new Activity("Graceful shutdown initiated.", GracefulShutdown.class)); + + // Stop REST API service to avoid changes from outside. + jerseyService.stopAsync(); + + // Stop all services that registered with the shutdown service (e.g. plugins) + // This must run after the BufferSynchronizerService shutdown to make sure the buffers are empty. + gracefulShutdownService.stopAsync(); + + // stop all maintenance tasks + periodicalsService.stopAsync().awaitTerminated(); + + // Wait until the shutdown service is done + gracefulShutdownService.awaitTerminated(); + + // Shut down hard with no shutdown hooks running. + LOG.info("Goodbye."); + if (exit) { + System.exit(0); + } + } +} diff --git a/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdownHook.java b/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdownHook.java new file mode 100644 index 000000000000..f5bfa3a0ef87 --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdownHook.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.shutdown; + +/** + * Services can implement this to participate in a graceful shutdown of the server. + * <p>
+ * A service can register itself with the {@link GracefulShutdownService} like this: + * + * <pre>{@code
+ * class MyService implements GracefulShutdownHook {
+ *     private final GracefulShutdownService shutdownService;
+ *
+ *     @Inject
+ *     public MyService(GracefulShutdownService shutdownService) {
+ *         this.shutdownService = shutdownService;
+ *     }
+ *
+ *     // This will be executed by the GracefulShutdownService on server shutdown
+ *     @Override
+ *     void doGracefulShutdown() throws Exception {
+ *         runShutdownTasks();
+ *     }
+ *
+ *     public void start() {
+ *         // Let the GracefulShutdownService know about this service
+ *         this.shutdownService.register(this);
+ *     }
+ *
+ *     // This will be executed by some service manager when this service is stopped
+ *     public void stop() {
+ *         // Remove this service from the GracefulShutdownService because it's stopped before server shutdown and
+ *         // we don't need any graceful shutdown for it anymore
+ *         this.shutdownService.unregister(this);
+ *         runShutdownTasks();
+ *     }
+ *
+ *     private void runShutdownTasks() {
+ *         // Run the actual shutdown tasks for the service here
+ *     }
+ * }
+ * }</pre>
+ */ +public interface GracefulShutdownHook { + /** + * Execute shutdown tasks for the service that implements this interface. + * <p>
+ * <strong>Warning:</strong>
+ * <ul>
+ *     <li>This method is called from another thread so the class that implements {@link GracefulShutdownHook} must be thread-safe.</li>
+ *     <li>The server shutdown is waiting for this method call to complete. Blocking this method will block the server
+ *     shutdown!</li>
+ * </ul>
+ * + * @throws Exception + */ + void doGracefulShutdown() throws Exception; +} diff --git a/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdownService.java b/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdownService.java new file mode 100644 index 000000000000..ba7ed9e6ee2c --- /dev/null +++ b/data-node/src/main/java/org/graylog/datanode/shutdown/GracefulShutdownService.java @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.shutdown; + +import com.google.common.base.Stopwatch; +import com.google.common.util.concurrent.AbstractIdleService; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Singleton; + +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import static java.util.Objects.requireNonNull; + +/** + * A service that participates in the Graylog server graceful shutdown. + *
<p>
+ * Services can implement {@link GracefulShutdownHook} and register themselves with this service to make sure they + * get shut down properly on server shutdown. During shutdown the registered hooks will be called in no particular + * order. + * <p>
+ * Make sure to use {@link #unregister(GracefulShutdownHook)} if a registered service is shutting down before the + * server shutdown to avoid leaking service instances in the {@link GracefulShutdownService}. + * + * See {@link GracefulShutdownHook} for an example. + */ +@Singleton +public class GracefulShutdownService extends AbstractIdleService { + private static final Logger LOG = LoggerFactory.getLogger(GracefulShutdownService.class); + + private final Set shutdownHooks = ConcurrentHashMap.newKeySet(); + private final AtomicBoolean isShuttingDown = new AtomicBoolean(false); + + @Override + protected void startUp() { + // Nothing to do + } + + @Override + protected void shutDown() { + // Don't do anything if the shutdown is already in progress or if there are no hooks registered + if (isShuttingDown.getAndSet(true) || shutdownHooks.isEmpty()) { + return; + } + + try { + // Use an executor to run the shutdown hooks in parallel but don't start too many threads + // TODO: Make max number of threads user configurable + final ExecutorService executor = executorService(Math.min(shutdownHooks.size(), 10)); + final CountDownLatch latch = new CountDownLatch(shutdownHooks.size()); + + LOG.info("Running graceful shutdown for <{}> shutdown hooks", shutdownHooks.size()); + for (final GracefulShutdownHook shutdownHook : shutdownHooks) { + executor.submit(() -> { + final String hookName = shutdownHook.getClass().getSimpleName(); + try { + LOG.info("Initiate shutdown for <{}>", hookName); + final Stopwatch stopwatch = Stopwatch.createStarted(); + shutdownHook.doGracefulShutdown(); + LOG.info("Finished shutdown for <{}>, took {} ms", hookName, stopwatch.stop().elapsed(TimeUnit.MILLISECONDS)); + } catch (Exception e) { + LOG.error("Problem shutting down <{}>", hookName, e); + } finally { + latch.countDown(); + } + }); + } + + latch.await(); + executor.shutdown(); + } catch (Exception e) { + LOG.error("Problem shutting down registered hooks", e); + } + } + + /** + * Register a shutdown hook with the service. + * + * @param shutdownHook a class that implements {@link GracefulShutdownHook} + * @throws IllegalStateException if the server shutdown is already in progress and the hook cannot be registered + * @throws NullPointerException if the shutdown hook argument is null + */ + public void register(GracefulShutdownHook shutdownHook) { + if (isShuttingDown.get()) { + // Avoid any changes to the shutdown hooks set when the shutdown is already in progress + throw new IllegalStateException("Couldn't register shutdown hook because shutdown is already in progress"); + } + shutdownHooks.add(requireNonNull(shutdownHook, "shutdownHook cannot be null")); + } + + /** + * Remove a previously registered shutdown hook from the service. + *
<p>
+ * This needs to be called if a registered service will be stopped before the server shuts down. + * + * @param shutdownHook a class that implements {@link GracefulShutdownHook} + * @throws IllegalStateException if the server shutdown is already in progress and the hook cannot be unregistered + * @throws NullPointerException if the shutdown hook argument is null + */ + public void unregister(GracefulShutdownHook shutdownHook) { + if (isShuttingDown.get()) { + // Avoid any changes to the shutdown hooks set when the shutdown is already in progress + throw new IllegalStateException("Couldn't unregister shutdown hook because shutdown is already in progress"); + } + shutdownHooks.remove(requireNonNull(shutdownHook, "shutdownHook cannot be null")); + } + + private ExecutorService executorService(final int maxThreads) { + return new ThreadPoolExecutor(maxThreads, + maxThreads, + 60L, TimeUnit.SECONDS, + new LinkedBlockingQueue<>(), + new ThreadFactoryBuilder() + .setNameFormat("graceful-shutdown-service-%d") + .setUncaughtExceptionHandler((t, e) -> LOG.error("Uncaught exception in <{}>", t, e)) + .build()); + } +} diff --git a/data-node/src/main/resources/META-INF/services/org.glassfish.hk2.extension.ServiceLocatorGenerator b/data-node/src/main/resources/META-INF/services/org.glassfish.hk2.extension.ServiceLocatorGenerator new file mode 100644 index 000000000000..8c90333f0703 --- /dev/null +++ b/data-node/src/main/resources/META-INF/services/org.glassfish.hk2.extension.ServiceLocatorGenerator @@ -0,0 +1 @@ +org.graylog2.shared.bindings.Graylog2ServiceLocatorGenerator \ No newline at end of file diff --git a/data-node/src/main/resources/META-INF/services/org.graylog2.bootstrap.CliCommandsProvider b/data-node/src/main/resources/META-INF/services/org.graylog2.bootstrap.CliCommandsProvider new file mode 100644 index 000000000000..1b38f84314c6 --- /dev/null +++ b/data-node/src/main/resources/META-INF/services/org.graylog2.bootstrap.CliCommandsProvider @@ -0,0 +1 @@ +org.graylog.datanode.commands.DatanodeCommandsProvider diff --git a/data-node/src/main/resources/git.properties b/data-node/src/main/resources/git.properties new file mode 100644 index 000000000000..69d474490a42 --- /dev/null +++ b/data-node/src/main/resources/git.properties @@ -0,0 +1,2 @@ +git.branch=${scmBranch} +git.commit.id=${buildNumber} diff --git a/data-node/src/main/resources/log4j2.xml b/data-node/src/main/resources/log4j2.xml new file mode 100644 index 000000000000..aa8408e33dcf --- /dev/null +++ b/data-node/src/main/resources/log4j2.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/data-node/src/main/resources/opensearch/config/jvm.options b/data-node/src/main/resources/opensearch/config/jvm.options new file mode 100644 index 000000000000..13fca96bad5c --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/jvm.options @@ -0,0 +1,80 @@ +## JVM configuration + +################################################################ +## IMPORTANT: JVM heap size +################################################################ +## +## You should always set the min and max JVM heap +## size to the same value. 
For example, to set +## the heap to 4 GB, set: +## +## -Xms4g +## -Xmx4g +## +## See https://opensearch.org/docs/opensearch/install/important-settings/ +## for more information +## +################################################################ + +# Xms represents the initial size of total heap space +# Xmx represents the maximum size of total heap space + +-Xms1g +-Xmx1g + +################################################################ +## Expert settings +################################################################ +## +## All settings below this section are considered +## expert settings. Don't tamper with them unless +## you understand what you are doing +## +################################################################ + +## GC configuration +8-10:-XX:+UseConcMarkSweepGC +8-10:-XX:CMSInitiatingOccupancyFraction=75 +8-10:-XX:+UseCMSInitiatingOccupancyOnly + +## G1GC Configuration +# NOTE: G1 GC is only supported on JDK version 10 or later +# to use G1GC, uncomment the next two lines and update the version on the +# following three lines to your version of the JDK +# 10:-XX:-UseConcMarkSweepGC +# 10:-XX:-UseCMSInitiatingOccupancyOnly +11-:-XX:+UseG1GC +11-:-XX:G1ReservePercent=25 +11-:-XX:InitiatingHeapOccupancyPercent=30 + +## JVM temporary directory +-Djava.io.tmpdir=${OPENSEARCH_TMPDIR} + +## heap dumps + +# generate a heap dump when an allocation from the Java heap fails +# heap dumps are created in the working directory of the JVM +-XX:+HeapDumpOnOutOfMemoryError + +# specify an alternative path for heap dumps; ensure the directory exists and +# has sufficient space +-XX:HeapDumpPath=data + +# specify an alternative path for JVM fatal error logs +-XX:ErrorFile=/tmp/hs_err_pid%p.log + +## JDK 8 GC logging +8:-XX:+PrintGCDetails +8:-XX:+PrintGCDateStamps +8:-XX:+PrintTenuringDistribution +8:-XX:+PrintGCApplicationStoppedTime +8:-Xloggc:/tmp/gc.log +8:-XX:+UseGCLogFileRotation +8:-XX:NumberOfGCLogFiles=32 +8:-XX:GCLogFileSize=64m + +# JDK 9+ GC logging +9-:-Xlog:gc*,gc+age=trace,safepoint:file=/tmp/gc.log:utctime,pid,tags:filecount=32,filesize=64m + +# Explicitly allow security manager (https://bugs.openjdk.java.net/browse/JDK-8270380) +18-:-Djava.security.manager=allow diff --git a/data-node/src/main/resources/opensearch/config/log4j2.properties b/data-node/src/main/resources/opensearch/config/log4j2.properties new file mode 100644 index 000000000000..bb27aaf2e22e --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/log4j2.properties @@ -0,0 +1,234 @@ +# +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+# + +status = error + +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n + +######## Server JSON ############################ +appender.rolling.type = RollingFile +appender.rolling.name = rolling +appender.rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_server.json +appender.rolling.filePermissions = rw-r----- +appender.rolling.layout.type = OpenSearchJsonLayout +appender.rolling.layout.type_name = server + +appender.rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz +appender.rolling.policies.type = Policies +appender.rolling.policies.time.type = TimeBasedTriggeringPolicy +appender.rolling.policies.time.interval = 1 +appender.rolling.policies.time.modulate = true +appender.rolling.policies.size.type = SizeBasedTriggeringPolicy +appender.rolling.policies.size.size = 128MB +appender.rolling.strategy.type = DefaultRolloverStrategy +appender.rolling.strategy.fileIndex = nomax +appender.rolling.strategy.action.type = Delete +appender.rolling.strategy.action.basepath = ${sys:opensearch.logs.base_path} +appender.rolling.strategy.action.condition.type = IfFileName +appender.rolling.strategy.action.condition.glob = ${sys:opensearch.logs.cluster_name}-* +appender.rolling.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize +appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB +################################################ +######## Server - old style pattern ########### +appender.rolling_old.type = RollingFile +appender.rolling_old.name = rolling_old +appender.rolling_old.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}.log +appender.rolling_old.filePermissions = rw-r----- +appender.rolling_old.layout.type = PatternLayout +appender.rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n + +appender.rolling_old.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz +appender.rolling_old.policies.type = Policies +appender.rolling_old.policies.time.type = TimeBasedTriggeringPolicy +appender.rolling_old.policies.time.interval = 1 +appender.rolling_old.policies.time.modulate = true +appender.rolling_old.policies.size.type = SizeBasedTriggeringPolicy +appender.rolling_old.policies.size.size = 128MB +appender.rolling_old.strategy.type = DefaultRolloverStrategy +appender.rolling_old.strategy.fileIndex = nomax +appender.rolling_old.strategy.action.type = Delete +appender.rolling_old.strategy.action.basepath = ${sys:opensearch.logs.base_path} +appender.rolling_old.strategy.action.condition.type = IfFileName +appender.rolling_old.strategy.action.condition.glob = ${sys:opensearch.logs.cluster_name}-* +appender.rolling_old.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize +appender.rolling_old.strategy.action.condition.nested_condition.exceeds = 2GB +################################################ + +rootLogger.level = info +rootLogger.appenderRef.console.ref = console +rootLogger.appenderRef.rolling.ref = rolling +rootLogger.appenderRef.rolling_old.ref = rolling_old + +######## Deprecation JSON ####################### +appender.deprecation_rolling.type = RollingFile +appender.deprecation_rolling.name = 
deprecation_rolling +appender.deprecation_rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.json +appender.deprecation_rolling.filePermissions = rw-r----- +appender.deprecation_rolling.layout.type = OpenSearchJsonLayout +appender.deprecation_rolling.layout.type_name = deprecation +appender.deprecation_rolling.layout.opensearchmessagefields=x-opaque-id +appender.deprecation_rolling.filter.rate_limit.type = RateLimitingFilter + +appender.deprecation_rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation-%i.json.gz +appender.deprecation_rolling.policies.type = Policies +appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy +appender.deprecation_rolling.policies.size.size = 1GB +appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy +appender.deprecation_rolling.strategy.max = 4 + +appender.header_warning.type = HeaderWarningAppender +appender.header_warning.name = header_warning +################################################# +######## Deprecation - old style pattern ####### +appender.deprecation_rolling_old.type = RollingFile +appender.deprecation_rolling_old.name = deprecation_rolling_old +appender.deprecation_rolling_old.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_deprecation.log +appender.deprecation_rolling_old.filePermissions = rw-r----- +appender.deprecation_rolling_old.layout.type = PatternLayout +appender.deprecation_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n + +appender.deprecation_rolling_old.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _deprecation-%i.log.gz +appender.deprecation_rolling_old.policies.type = Policies +appender.deprecation_rolling_old.policies.size.type = SizeBasedTriggeringPolicy +appender.deprecation_rolling_old.policies.size.size = 1GB +appender.deprecation_rolling_old.strategy.type = DefaultRolloverStrategy +appender.deprecation_rolling_old.strategy.max = 4 +################################################# +logger.deprecation.name = org.opensearch.deprecation +logger.deprecation.level = deprecation +logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling +logger.deprecation.appenderRef.deprecation_rolling_old.ref = deprecation_rolling_old +logger.deprecation.appenderRef.header_warning.ref = header_warning +logger.deprecation.additivity = false + +######## Search slowlog JSON #################### +appender.index_search_slowlog_rolling.type = RollingFile +appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling +appender.index_search_slowlog_rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs\ + .cluster_name}_index_search_slowlog.json +appender.index_search_slowlog_rolling.filePermissions = rw-r----- +appender.index_search_slowlog_rolling.layout.type = OpenSearchJsonLayout +appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog +appender.index_search_slowlog_rolling.layout.opensearchmessagefields=message,took,took_millis,total_hits,types,stats,search_type,total_shards,source,id + +appender.index_search_slowlog_rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs\ + .cluster_name}_index_search_slowlog-%i.json.gz +appender.index_search_slowlog_rolling.policies.type = Policies 
+appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy +appender.index_search_slowlog_rolling.policies.size.size = 1GB +appender.index_search_slowlog_rolling.strategy.type = DefaultRolloverStrategy +appender.index_search_slowlog_rolling.strategy.max = 4 +################################################# +######## Search slowlog - old style pattern #### +appender.index_search_slowlog_rolling_old.type = RollingFile +appender.index_search_slowlog_rolling_old.name = index_search_slowlog_rolling_old +appender.index_search_slowlog_rolling_old.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _index_search_slowlog.log +appender.index_search_slowlog_rolling_old.filePermissions = rw-r----- +appender.index_search_slowlog_rolling_old.layout.type = PatternLayout +appender.index_search_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n + +appender.index_search_slowlog_rolling_old.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _index_search_slowlog-%i.log.gz +appender.index_search_slowlog_rolling_old.policies.type = Policies +appender.index_search_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy +appender.index_search_slowlog_rolling_old.policies.size.size = 1GB +appender.index_search_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy +appender.index_search_slowlog_rolling_old.strategy.max = 4 +################################################# +logger.index_search_slowlog_rolling.name = index.search.slowlog +logger.index_search_slowlog_rolling.level = trace +logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling +logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling_old.ref = index_search_slowlog_rolling_old +logger.index_search_slowlog_rolling.additivity = false + +######## Indexing slowlog JSON ################## +appender.index_indexing_slowlog_rolling.type = RollingFile +appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling +appender.index_indexing_slowlog_rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _index_indexing_slowlog.json +appender.index_indexing_slowlog_rolling.filePermissions = rw-r----- +appender.index_indexing_slowlog_rolling.layout.type = OpenSearchJsonLayout +appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog +appender.index_indexing_slowlog_rolling.layout.opensearchmessagefields=message,took,took_millis,doc_type,id,routing,source + +appender.index_indexing_slowlog_rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _index_indexing_slowlog-%i.json.gz +appender.index_indexing_slowlog_rolling.policies.type = Policies +appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy +appender.index_indexing_slowlog_rolling.policies.size.size = 1GB +appender.index_indexing_slowlog_rolling.strategy.type = DefaultRolloverStrategy +appender.index_indexing_slowlog_rolling.strategy.max = 4 +################################################# +######## Indexing slowlog - old style pattern ## +appender.index_indexing_slowlog_rolling_old.type = RollingFile +appender.index_indexing_slowlog_rolling_old.name = index_indexing_slowlog_rolling_old +appender.index_indexing_slowlog_rolling_old.fileName = 
${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _index_indexing_slowlog.log +appender.index_indexing_slowlog_rolling_old.filePermissions = rw-r----- +appender.index_indexing_slowlog_rolling_old.layout.type = PatternLayout +appender.index_indexing_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n + +appender.index_indexing_slowlog_rolling_old.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}\ + _index_indexing_slowlog-%i.log.gz +appender.index_indexing_slowlog_rolling_old.policies.type = Policies +appender.index_indexing_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy +appender.index_indexing_slowlog_rolling_old.policies.size.size = 1GB +appender.index_indexing_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy +appender.index_indexing_slowlog_rolling_old.strategy.max = 4 +################################################# + +logger.index_indexing_slowlog.name = index.indexing.slowlog.index +logger.index_indexing_slowlog.level = trace +logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling +logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling_old.ref = index_indexing_slowlog_rolling_old +logger.index_indexing_slowlog.additivity = false + +######## Task details log JSON #################### +appender.task_detailslog_rolling.type = RollingFile +appender.task_detailslog_rolling.name = task_detailslog_rolling +appender.task_detailslog_rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog.json +appender.task_detailslog_rolling.filePermissions = rw-r----- +appender.task_detailslog_rolling.layout.type = OpenSearchJsonLayout +appender.task_detailslog_rolling.layout.type_name = task_detailslog +appender.task_detailslog_rolling.layout.opensearchmessagefields=taskId,type,action,description,start_time_millis,resource_stats,metadata + +appender.task_detailslog_rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog-%i.json.gz +appender.task_detailslog_rolling.policies.type = Policies +appender.task_detailslog_rolling.policies.size.type = SizeBasedTriggeringPolicy +appender.task_detailslog_rolling.policies.size.size = 1GB +appender.task_detailslog_rolling.strategy.type = DefaultRolloverStrategy +appender.task_detailslog_rolling.strategy.max = 4 +################################################# +######## Task details log - old style pattern #### +appender.task_detailslog_rolling_old.type = RollingFile +appender.task_detailslog_rolling_old.name = task_detailslog_rolling_old +appender.task_detailslog_rolling_old.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog.log +appender.task_detailslog_rolling_old.filePermissions = rw-r----- +appender.task_detailslog_rolling_old.layout.type = PatternLayout +appender.task_detailslog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n + +appender.task_detailslog_rolling_old.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog-%i.log.gz +appender.task_detailslog_rolling_old.policies.type = Policies +appender.task_detailslog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy 
+appender.task_detailslog_rolling_old.policies.size.size = 1GB +appender.task_detailslog_rolling_old.strategy.type = DefaultRolloverStrategy +appender.task_detailslog_rolling_old.strategy.max = 4 +################################################# +logger.task_detailslog_rolling.name = task.detailslog +logger.task_detailslog_rolling.level = trace +logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling.ref = task_detailslog_rolling +logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling_old.ref = task_detailslog_rolling_old +logger.task_detailslog_rolling.additivity = false diff --git a/data-node/src/main/resources/opensearch/config/opensearch-observability/observability.yml b/data-node/src/main/resources/opensearch/config/opensearch-observability/observability.yml new file mode 100644 index 000000000000..6bc2c48720cc --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-observability/observability.yml @@ -0,0 +1,28 @@ +--- +## +# Copyright OpenSearch Contributors +# SPDX-License-Identifier: Apache-2.0 +## + +# configuration file for the observability plugin +opensearch.notebooks: + general: + operationTimeoutMs: 60000 # 60 seconds, Minimum 100ms + defaultItemsQueryCount: 100 # default number of items to query + polling: + jobLockDurationSeconds: 300 # 5 Minutes, Minimum 10 seconds + minPollingDurationSeconds: 300 # 5 Minutes, Minimum 60 seconds + maxPollingDurationSeconds: 900 # 15 Minutes, Minimum 5 Minutes + maxLockRetries: 1 # Max number of retries to retry locking + access: + adminAccess: "AllObservabilityObjects" + # adminAccess values: + ## Standard -> Admin user access follows standard user + ## AllObservabilityObjects -> Admin user with "all_access" role can see all observability objects of all users. + filterBy: "NoFilter" # Applied when tenant != __user__ + # filterBy values: + ## NoFilter -> everyone see each other's observability objects + ## User -> observability objects are visible to only themselves + ## Roles -> observability objects are visible to users having any one of the role of creator + ## BackendRoles -> observability objects are visible to users having any one of the backend role of creator + ignoreRoles: ["own_index", "kibana_user", "observability_full_access", "observability_read_access"] diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/action_groups.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/action_groups.yml new file mode 100644 index 000000000000..7c40612b8363 --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/action_groups.yml @@ -0,0 +1,3 @@ +_meta: + type: "actiongroups" + config_version: 2 diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/allowlist.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/allowlist.yml new file mode 100644 index 000000000000..e669557d7ee9 --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/allowlist.yml @@ -0,0 +1,69 @@ +--- +_meta: + type: "allowlist" + config_version: 2 + +# Description: +# enabled - feature flag. +# if enabled is false, the allowlisting feature is removed. +# This is like removing the check that checks if an API is allowlisted. +# This is equivalent to continuing with the usual access control checks, and removing all the code that implements allowlisting. +# if enabled is true, then all users except SuperAdmin can access only the APIs in requests +# SuperAdmin can access all APIs. 
+# SuperAdmin is defined by the SuperAdmin certificate, which is configured in the opensearch.yml setting: plugins.security.authcz.admin_dn: +# Refer to the example setting in opensearch.yml.example, and the opendistro documentation to know more about configuring SuperAdmin. +# +# requests - map of allowlisted endpoints, and the allowlisted HTTP requests for those endpoints + +# Examples showing how to configure this yml file (make sure the _meta data from above is also there): +# Example 1: +# To enable allowlisting and allowlist GET /_cluster/settings +# +#config: +# enabled: true +# requests: +# /_cluster/settings: +# - GET +# +# Example 2: +# If you want to allowlist multiple request methods for /_cluster/settings (GET,PUT): +# +#config: +# enabled: true +# requests: +# /_cluster/settings: +# - GET +# - PUT +# +# Example 3: +# If you want to allowlist other APIs as well, for example GET /_cat/nodes, and GET /_cat/shards: +# +#config: +# enabled: true +# requests: +# /_cluster/settings: +# - GET +# - PUT +# /_cat/nodes: +# - GET +# /_cat/shards: +# - GET +# +# Example 4: +# If you want to disable the allowlisting feature, set enabled to false. +# enabled: false +# requests: +# /_cluster/settings: +# - GET +# +#At this point, all APIs become allowlisted because the feature to allowlist is off, so requests is irrelevant. + + +#this name must be config +config: + enabled: false + requests: + /_cluster/settings: + - GET + /_cat/nodes: + - GET diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/audit.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/audit.yml new file mode 100644 index 000000000000..dcfbad8dd7fe --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/audit.yml @@ -0,0 +1,85 @@ +_meta: + type: "audit" + config_version: 2 + +config: + # enable/disable audit logging + enabled: true + + audit: + # Enable/disable REST API auditing + enable_rest: true + + # Categories to exclude from REST API auditing + disabled_rest_categories: + - AUTHENTICATED + - GRANTED_PRIVILEGES + + # Enable/disable Transport API auditing + enable_transport: true + + # Categories to exclude from Transport API auditing + disabled_transport_categories: + - AUTHENTICATED + - GRANTED_PRIVILEGES + + # Users to be excluded from auditing. Wildcard patterns are supported. Eg: + # ignore_users: ["test-user", "employee-*"] + ignore_users: + - kibanaserver + + # Requests to be excluded from auditing. Wildcard patterns are supported. Eg: + # ignore_requests: ["indices:data/read/*", "SearchRequest"] + ignore_requests: [] + + # Log individual operations in a bulk request + resolve_bulk_requests: false + + # Include the body of the request (if available) for both REST and the transport layer + log_request_body: true + + # Logs all indices affected by a request. Resolves aliases and wildcards/date patterns + resolve_indices: true + + # Exclude sensitive headers from being included in the logs. Eg: Authorization + exclude_sensitive_headers: true + + compliance: + # enable/disable compliance + enabled: true + + # Log updates to internal security changes + internal_config: true + + # Log external config files for the node + external_config: false + + # Log only metadata of the document for read events + read_metadata_only: true + + # Map of indexes and fields to monitor for read events. Wildcard patterns are supported for both index names and fields. 
Eg: + # read_watched_fields: { + # "twitter": ["message"] + # "logs-*": ["id", "attr*"] + # } + read_watched_fields: {} + + # List of users to ignore for read events. Wildcard patterns are supported. Eg: + # read_ignore_users: ["test-user", "employee-*"] + read_ignore_users: + - kibanaserver + + # Log only metadata of the document for write events + write_metadata_only: true + + # Log only diffs for document updates + write_log_diffs: false + + # List of indices to watch for write events. Wildcard patterns are supported + # write_watched_indices: ["twitter", "logs-*"] + write_watched_indices: [] + + # List of users to ignore for write events. Wildcard patterns are supported. Eg: + # write_ignore_users: ["test-user", "employee-*"] + write_ignore_users: + - kibanaserver diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/config.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/config.yml new file mode 100644 index 000000000000..fdbda5d80dbd --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/config.yml @@ -0,0 +1,247 @@ +--- + +# This is the main OpenSearch Security configuration file where authentication +# and authorization is defined. +# +# You need to configure at least one authentication domain in the authc of this file. +# An authentication domain is responsible for extracting the user credentials from +# the request and for validating them against an authentication backend like Active Directory for example. +# +# If more than one authentication domain is configured the first one which succeeds wins. +# If all authentication domains fail then the request is unauthenticated. +# In this case an exception is thrown and/or the HTTP status is set to 401. +# +# After authentication authorization (authz) will be applied. There can be zero or more authorizers which collect +# the roles from a given backend for the authenticated user. +# +# Both, authc and auth can be enabled/disabled separately for REST and TRANSPORT layer. Default is true for both. +# http_enabled: true +# transport_enabled: true +# +# For HTTP it is possible to allow anonymous authentication. If that is the case then the HTTP authenticators try to +# find user credentials in the HTTP request. If credentials are found then the user gets regularly authenticated. +# If none can be found the user will be authenticated as an "anonymous" user. This user has always the username "anonymous" +# and one role named "anonymous_backendrole". +# If you enable anonymous authentication all HTTP authenticators will not challenge. +# +# +# Note: If you define more than one HTTP authenticators make sure to put non-challenging authenticators like "proxy" or "clientcert" +# first and the challenging one last. +# Because it's not possible to challenge a client with two different authentication methods (for example +# Kerberos and Basic) only one can have the challenge flag set to true. You can cope with this situation +# by using pre-authentication, e.g. sending a HTTP Basic authentication header in the request. +# +# Default value of the challenge flag is true. +# +# +# HTTP +# basic (challenging) +# proxy (not challenging, needs xff) +# kerberos (challenging) +# clientcert (not challenging, needs https) +# jwt (not challenging) +# host (not challenging) #DEPRECATED, will be removed in a future version. 
+# host based authentication is configurable in roles_mapping + +# Authc +# internal +# noop +# ldap + +# Authz +# ldap +# noop + + + +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + # Set filtered_alias_mode to 'disallow' to forbid more than 2 filtered aliases per index + # Set filtered_alias_mode to 'warn' to allow more than 2 filtered aliases per index but warns about it (default) + # Set filtered_alias_mode to 'nowarn' to allow more than 2 filtered aliases per index silently + #filtered_alias_mode: warn + #do_not_fail_on_forbidden: false + #kibana: + # Kibana multitenancy + #multitenancy_enabled: true + #server_username: kibanaserver + #index: '.kibana' + http: + anonymous_auth_enabled: false + xff: + enabled: false + internalProxies: '192\.168\.0\.10|192\.168\.0\.11' # regex pattern + #internalProxies: '.*' # trust all internal proxies, regex pattern + #remoteIpHeader: 'x-forwarded-for' + ###### see https://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html for regex help + ###### more information about XFF https://en.wikipedia.org/wiki/X-Forwarded-For + ###### and here https://tools.ietf.org/html/rfc7239 + ###### and https://tomcat.apache.org/tomcat-8.0-doc/config/valve.html#Remote_IP_Valve + authc: + kerberos_auth_domain: + http_enabled: false + transport_enabled: false + order: 6 + http_authenticator: + type: kerberos + challenge: true + config: + # If true a lot of kerberos/security related debugging output will be logged to standard out + krb_debug: false + # If true then the realm will be stripped from the user name + strip_realm_from_principal: true + authentication_backend: + type: noop + basic_internal_auth_domain: + description: "Authenticate via HTTP Basic against internal users database" + http_enabled: false + transport_enabled: false + order: 4 + http_authenticator: + type: basic + challenge: true + authentication_backend: + type: intern + proxy_auth_domain: + description: "Authenticate via proxy" + http_enabled: false + transport_enabled: false + order: 3 + http_authenticator: + type: proxy + challenge: false + config: + user_header: "x-proxy-user" + roles_header: "x-proxy-roles" + authentication_backend: + type: noop + jwt_auth_domain: + description: "Authenticate via Json Web Token" + http_enabled: true + transport_enabled: true + order: 1 + http_authenticator: + type: jwt + challenge: false + config: + signing_key: "base64 encoded HMAC key or public RSA/ECDSA pem key" + jwt_header: "Authorization" + jwt_url_parameter: null + roles_key: "os_roles" + subject_key: null + authentication_backend: + type: noop + clientcert_auth_domain: + description: "Authenticate via SSL client certificates" + http_enabled: true + transport_enabled: true + order: 0 + http_authenticator: + type: clientcert + config: + username_attribute: cn #optional, if omitted DN becomes username + challenge: false + authentication_backend: + type: noop + ldap: + description: "Authenticate via LDAP or Active Directory" + http_enabled: false + transport_enabled: false + order: 5 + http_authenticator: + type: basic + challenge: false + authentication_backend: + # LDAP authentication backend (authenticate users against a LDAP or Active Directory) + type: ldap + config: + # enable ldaps + enable_ssl: false + # enable start tls, enable_ssl should be false + enable_start_tls: false + # send client certificate + enable_ssl_client_auth: false + # verify ldap hostname + verify_hostnames: true + hosts: + - localhost:8389 + bind_dn: null + password: null + userbase: 
'ou=people,dc=example,dc=com' + # Filter to search for users (currently in the whole subtree beneath userbase) + # {0} is substituted with the username + usersearch: '(sAMAccountName={0})' + # Use this attribute from the user as username (if not set then DN is used) + username_attribute: null + authz: + roles_from_myldap: + description: "Authorize via LDAP or Active Directory" + http_enabled: false + transport_enabled: false + authorization_backend: + # LDAP authorization backend (gather roles from a LDAP or Active Directory, you have to configure the above LDAP authentication backend settings too) + type: ldap + config: + # enable ldaps + enable_ssl: false + # enable start tls, enable_ssl should be false + enable_start_tls: false + # send client certificate + enable_ssl_client_auth: false + # verify ldap hostname + verify_hostnames: true + hosts: + - localhost:8389 + bind_dn: null + password: null + rolebase: 'ou=groups,dc=example,dc=com' + # Filter to search for roles (currently in the whole subtree beneath rolebase) + # {0} is substituted with the DN of the user + # {1} is substituted with the username + # {2} is substituted with an attribute value from user's directory entry, of the authenticated user. Use userroleattribute to specify the name of the attribute + rolesearch: '(member={0})' + # Specify the name of the attribute which value should be substituted with {2} above + userroleattribute: null + # Roles as an attribute of the user entry + userrolename: disabled + #userrolename: memberOf + # The attribute in a role entry containing the name of that role, Default is "name". + # Can also be "dn" to use the full DN as rolename. + rolename: cn + # Resolve nested roles transitive (roles which are members of other roles and so on ...) + resolve_nested_roles: true + userbase: 'ou=people,dc=example,dc=com' + # Filter to search for users (currently in the whole subtree beneath userbase) + # {0} is substituted with the username + usersearch: '(uid={0})' + # Skip users matching a user name, a wildcard or a regex pattern + #skip_users: + # - 'cn=Michael Jackson,ou*people,o=TEST' + # - '/\S*/' + roles_from_another_ldap: + description: "Authorize via another Active Directory" + http_enabled: false + transport_enabled: false + authorization_backend: + type: ldap + #config goes here ... 
+ # auth_failure_listeners: + # ip_rate_limiting: + # type: ip + # allowed_tries: 10 + # time_window_seconds: 3600 + # block_expiry_seconds: 600 + # max_blocked_clients: 100000 + # max_tracked_clients: 100000 + # internal_authentication_backend_limiting: + # type: username + # authentication_backend: intern + # allowed_tries: 10 + # time_window_seconds: 3600 + # block_expiry_seconds: 600 + # max_blocked_clients: 100000 + # max_tracked_clients: 100000 diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/internal_users.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/internal_users.yml new file mode 100644 index 000000000000..5bd3a107b3d4 --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/internal_users.yml @@ -0,0 +1,9 @@ +--- +# This is the internal user database +# The hash value is a bcrypt hash and can be generated with plugin/tools/hash.sh + +_meta: + type: "internalusers" + config_version: 2 + +# all demo users have been removed, so that they don't end up in our data node by accident diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/nodes_dn.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/nodes_dn.yml new file mode 100644 index 000000000000..7f8304cf0db0 --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/nodes_dn.yml @@ -0,0 +1,8 @@ +_meta: + type: "nodesdn" + config_version: 2 + +# Define nodesdn mapping name and corresponding values +# cluster1: +# nodes_dn: +# - CN=*.example.com diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/roles.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/roles.yml new file mode 100644 index 000000000000..1d081a5fd08d --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/roles.yml @@ -0,0 +1,292 @@ +_meta: + type: "roles" + config_version: 2 + +# Restrict users so they can only view visualization and dashboard on OpenSearchDashboards +kibana_read_only: + reserved: true + +# The security REST API access role is used to assign specific users access to change the security settings through the REST API. 
+security_rest_api_access: + reserved: true + +# Allows users to view monitors, destinations and alerts +alerting_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/alerting/alerts/get' + - 'cluster:admin/opendistro/alerting/destination/get' + - 'cluster:admin/opendistro/alerting/monitor/get' + - 'cluster:admin/opendistro/alerting/monitor/search' + - 'cluster:admin/opensearch/alerting/findings/get' + +# Allows users to view and acknowledge alerts +alerting_ack_alerts: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/alerting/alerts/*' + +# Allows users to use all alerting functionality +alerting_full_access: + reserved: true + cluster_permissions: + - 'cluster_monitor' + - 'cluster:admin/opendistro/alerting/*' + - 'cluster:admin/opensearch/alerting/*' + - 'cluster:admin/opensearch/notifications/feature/publish' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices_monitor' + - 'indices:admin/aliases/get' + - 'indices:admin/mappings/get' + +# Allow users to read Anomaly Detection detectors and results +anomaly_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/ad/detector/info' + - 'cluster:admin/opendistro/ad/detector/search' + - 'cluster:admin/opendistro/ad/detectors/get' + - 'cluster:admin/opendistro/ad/result/search' + - 'cluster:admin/opendistro/ad/tasks/search' + - 'cluster:admin/opendistro/ad/detector/validate' + - 'cluster:admin/opendistro/ad/result/topAnomalies' + +# Allows users to use all Anomaly Detection functionality +anomaly_full_access: + reserved: true + cluster_permissions: + - 'cluster_monitor' + - 'cluster:admin/opendistro/ad/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices_monitor' + - 'indices:admin/aliases/get' + - 'indices:admin/mappings/get' + +# Allows users to read Notebooks +notebooks_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/notebooks/list' + - 'cluster:admin/opendistro/notebooks/get' + +# Allows users to all Notebooks functionality +notebooks_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/notebooks/create' + - 'cluster:admin/opendistro/notebooks/update' + - 'cluster:admin/opendistro/notebooks/delete' + - 'cluster:admin/opendistro/notebooks/get' + - 'cluster:admin/opendistro/notebooks/list' + +# Allows users to read observability objects +observability_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/observability/get' + +# Allows users to all Observability functionality +observability_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/observability/create' + - 'cluster:admin/opensearch/observability/update' + - 'cluster:admin/opensearch/observability/delete' + - 'cluster:admin/opensearch/observability/get' + +# Allows users to read and download Reports +reports_instances_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/reports/instance/list' + - 'cluster:admin/opendistro/reports/instance/get' + - 'cluster:admin/opendistro/reports/menu/download' + +# Allows users to read and download Reports and Report-definitions +reports_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/reports/definition/get' + - 'cluster:admin/opendistro/reports/definition/list' + - 'cluster:admin/opendistro/reports/instance/list' + - 'cluster:admin/opendistro/reports/instance/get' + - 'cluster:admin/opendistro/reports/menu/download' + +# 
Allows users to all Reports functionality +reports_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/reports/definition/create' + - 'cluster:admin/opendistro/reports/definition/update' + - 'cluster:admin/opendistro/reports/definition/on_demand' + - 'cluster:admin/opendistro/reports/definition/delete' + - 'cluster:admin/opendistro/reports/definition/get' + - 'cluster:admin/opendistro/reports/definition/list' + - 'cluster:admin/opendistro/reports/instance/list' + - 'cluster:admin/opendistro/reports/instance/get' + - 'cluster:admin/opendistro/reports/menu/download' + +# Allows users to use all asynchronous-search functionality +asynchronous_search_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/asynchronous_search/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:data/read/search*' + +# Allows users to read stored asynchronous-search results +asynchronous_search_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opendistro/asynchronous_search/get' + +# Allows user to use all index_management actions - ism policies, rollups, transforms +index_management_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/ism/*" + - "cluster:admin/opendistro/rollup/*" + - "cluster:admin/opendistro/transform/*" + - "cluster:admin/opensearch/notifications/feature/publish" + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:admin/opensearch/ism/*' + +# Allows users to use all cross cluster replication functionality at leader cluster +cross_cluster_replication_leader_full_access: + reserved: true + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - "indices:admin/plugins/replication/index/setup/validate" + - "indices:data/read/plugins/replication/changes" + - "indices:data/read/plugins/replication/file_chunk" + +# Allows users to use all cross cluster replication functionality at follower cluster +cross_cluster_replication_follower_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/plugins/replication/autofollow/update" + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - "indices:admin/plugins/replication/index/setup/validate" + - "indices:data/write/plugins/replication/changes" + - "indices:admin/plugins/replication/index/start" + - "indices:admin/plugins/replication/index/pause" + - "indices:admin/plugins/replication/index/resume" + - "indices:admin/plugins/replication/index/stop" + - "indices:admin/plugins/replication/index/update" + - "indices:admin/plugins/replication/index/status_check" + +# Allow users to read ML stats/models/tasks +ml_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/ml/stats/nodes' + - 'cluster:admin/opensearch/ml/models/get' + - 'cluster:admin/opensearch/ml/models/search' + - 'cluster:admin/opensearch/ml/tasks/get' + - 'cluster:admin/opensearch/ml/tasks/search' + +# Allows users to use all ML functionality +ml_full_access: + reserved: true + cluster_permissions: + - 'cluster_monitor' + - 'cluster:admin/opensearch/ml/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices_monitor' + +# Allows users to use all Notifications functionality +notifications_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/notifications/*' + +# Allows users to read Notifications config/channels +notifications_read_access: + reserved: true + cluster_permissions: + - 
'cluster:admin/opensearch/notifications/configs/get' + - 'cluster:admin/opensearch/notifications/features' + - 'cluster:admin/opensearch/notifications/channels/get' + +# Allows users to use all snapshot management functionality +snapshot_management_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/snapshot_management/*' + - 'cluster:admin/opensearch/notifications/feature/publish' + - 'cluster:admin/repository/*' + - 'cluster:admin/snapshot/*' + +# Allows users to see snapshots, repositories, and snapshot management policies +snapshot_management_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/snapshot_management/policy/get' + - 'cluster:admin/opensearch/snapshot_management/policy/search' + - 'cluster:admin/opensearch/snapshot_management/policy/explain' + - 'cluster:admin/repository/get' + - 'cluster:admin/snapshot/get' + +# Allows user to use point in time functionality +point_in_time_full_access: + reserved: true + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'manage_point_in_time' + +# Allows users to see security analytics detectors and others +security_analytics_read_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/securityanalytics/alerts/get' + - 'cluster:admin/opensearch/securityanalytics/detector/get' + - 'cluster:admin/opensearch/securityanalytics/detector/search' + - 'cluster:admin/opensearch/securityanalytics/findings/get' + - 'cluster:admin/opensearch/securityanalytics/mapping/get' + - 'cluster:admin/opensearch/securityanalytics/mapping/view/get' + - 'cluster:admin/opensearch/securityanalytics/rule/get' + - 'cluster:admin/opensearch/securityanalytics/rule/search' + +# Allows users to use all security analytics functionality +security_analytics_full_access: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/securityanalytics/alerts/*' + - 'cluster:admin/opensearch/securityanalytics/detector/*' + - 'cluster:admin/opensearch/securityanalytics/findings/*' + - 'cluster:admin/opensearch/securityanalytics/mapping/*' + - 'cluster:admin/opensearch/securityanalytics/rule/*' + index_permissions: + - index_patterns: + - '*' + allowed_actions: + - 'indices:admin/mapping/put' + - 'indices:admin/mappings/get' + +# Allows users to view and acknowledge alerts +security_analytics_ack_alerts: + reserved: true + cluster_permissions: + - 'cluster:admin/opensearch/securityanalytics/alerts/*' diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/roles_mapping.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/roles_mapping.yml new file mode 100644 index 000000000000..89f46bf2602f --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/roles_mapping.yml @@ -0,0 +1,49 @@ +--- +# In this file users, backendroles and hosts can be mapped to Security roles. 
+# Permissions for OpenSearch roles are configured in roles.yml + +_meta: + type: "rolesmapping" + config_version: 2 + +# Define your roles mapping here + +## Demo roles mapping + +all_access: + reserved: false + backend_roles: + - "admin" + description: "Maps admin to all_access" + +own_index: + reserved: false + users: + - "*" + description: "Allow full access to an index named like the username" + +logstash: + reserved: false + backend_roles: + - "logstash" + +kibana_user: + reserved: false + backend_roles: + - "kibanauser" + description: "Maps kibanauser to kibana_user" + +readall: + reserved: false + backend_roles: + - "readall" + +manage_snapshots: + reserved: false + backend_roles: + - "snapshotrestore" + +kibana_server: + reserved: true + users: + - "kibanaserver" diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/tenants.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/tenants.yml new file mode 100644 index 000000000000..04104dce00de --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/tenants.yml @@ -0,0 +1,11 @@ +--- +_meta: + type: "tenants" + config_version: 2 + +# Define your tenants here + +## Demo tenants +admin_tenant: + reserved: false + description: "Demo tenant for admin user" diff --git a/data-node/src/main/resources/opensearch/config/opensearch-security/whitelist.yml b/data-node/src/main/resources/opensearch/config/opensearch-security/whitelist.yml new file mode 100644 index 000000000000..1d54ff841af5 --- /dev/null +++ b/data-node/src/main/resources/opensearch/config/opensearch-security/whitelist.yml @@ -0,0 +1,69 @@ +--- +_meta: + type: "whitelist" + config_version: 2 + +# Description: +# enabled - feature flag. +# if enabled is false, the whitelisting feature is removed. +# This is like removing the check that checks if an API is whitelisted. +# This is equivalent to continuing with the usual access control checks, and removing all the code that implements whitelisting. +# if enabled is true, then all users except SuperAdmin can access only the APIs in requests +# SuperAdmin can access all APIs. +# SuperAdmin is defined by the SuperAdmin certificate, which is configured in the opensearch.yml setting: plugins.security.authcz.admin_dn: +# Refer to the example setting in opensearch.yml.example, and the opendistro documentation to know more about configuring SuperAdmin. +# +# requests - map of whitelisted endpoints, and the whitelisted HTTP requests for those endpoints + +# Examples showing how to configure this yml file (make sure the _meta data from above is also there): +# Example 1: +# To enable whitelisting and whitelist GET /_cluster/settings +# +#config: +# enabled: true +# requests: +# /_cluster/settings: +# - GET +# +# Example 2: +# If you want to whitelist multiple request methods for /_cluster/settings (GET,PUT): +# +#config: +# enabled: true +# requests: +# /_cluster/settings: +# - GET +# - PUT +# +# Example 3: +# If you want to whitelist other APIs as well, for example GET /_cat/nodes, and GET /_cat/shards: +# +#config: +# enabled: true +# requests: +# /_cluster/settings: +# - GET +# - PUT +# /_cat/nodes: +# - GET +# /_cat/shards: +# - GET +# +# Example 4: +# If you want to disable the whitelisting feature, set enabled to false. +# enabled: false +# requests: +# /_cluster/settings: +# - GET +# +#At this point, all APIs become whitelisted because the feature to whitelist is off, so requests is irrelevant. 
+ + +#this name must be config +config: + enabled: false + requests: + /_cluster/settings: + - GET + /_cat/nodes: + - GET diff --git a/data-node/src/main/resources/org/graylog/datanode/metrics/metrics-ism.json b/data-node/src/main/resources/org/graylog/datanode/metrics/metrics-ism.json new file mode 100644 index 000000000000..b0ee80afbdae --- /dev/null +++ b/data-node/src/main/resources/org/graylog/datanode/metrics/metrics-ism.json @@ -0,0 +1,79 @@ +{ + "policy": { + "description": "Rollover and rollup index after one day, delete after 14 days", + "default_state": "open", + "states": [ + { + "name": "open", + "actions": [ + { + "rollover": { + "min_index_age": "1d" + } + } + ], + "transitions": [ + { + "state_name": "rollup" + } + ] + }, + { + "name": "rollup", + "actions": [ + { + "rollup": { + "ism_rollup": { + "target_index": "gl-datanode-metrics-daily", + "description": "Rollup index", + "page_size": 200, + "dimensions": [ + { + "date_histogram": { + "source_field": "timestamp", + "fixed_interval": "60m", + "timezone": "America/Los_Angeles" + } + } + ], + "metrics": [ + { + "source_field": "jvm_heap", + "metrics": [ + { + "avg": {} + } + ] + } + ] + } + } + } + ], + "transitions": [ + { + "state_name": "delete", + "conditions": { + "min_index_age": "13d" + } + } + ] + }, + { + "name": "delete", + "actions": [ + { + "delete": {} + } + ], + "transitions": [] + } + ], + "ism_template": { + "index_patterns": [ + "gl-datanode-metrics" + ], + "priority": 1 + } + } +} diff --git a/data-node/src/main/resources/org/graylog2/featureflag/feature-flag.config b/data-node/src/main/resources/org/graylog2/featureflag/feature-flag.config new file mode 100644 index 000000000000..9c420b74a7ac --- /dev/null +++ b/data-node/src/main/resources/org/graylog2/featureflag/feature-flag.config @@ -0,0 +1,58 @@ +########################################### +# GRAYLOG FEATURE FLAG CONFIGURATION FILE # +########################################### +# +# * Entries are generally expected to be a single line of the form, one of the following: +# +# propertyName=propertyValue +# propertyName:propertyValue +# +# * White space that appears between the property name and property value is ignored, +# so the following are equivalent: +# +# name=Stephen +# name = Stephen +# +# * White space at the beginning of the line is also ignored. +# +# * Lines that start with the comment characters ! or # are ignored. Blank lines are also ignored. +# +# * The property value is generally terminated by the end of the line. White space following the +# property value is not ignored, and is treated as part of the property value. +# +# 'boolean' feature flags can be entered as follows: feature1=ON/on. On/on values are treated as 'true', other values +# treated as 'false'. The name of a feature flag must match the regex [a-zA-Z_][a-zA-Z0-9_]* otherwise no metrics can be +# collected. +# Feature flags in this file can be overridden or extended by a custom feature flag file, java system properties or environment variables. +# The order in which the flags are overridden is: this file <- custom file <- system properties <- environment variables. +# The access to a feature flag value is case-insensitive e.g. if a feature flag is specified as 'feature1' it can also be +# accessed with 'FEATURE1'. The override logic for the keys is also case-insensitive. 
+# +# Custom Feature Flag File: +# feature flags can be overridden or extended with a custom properties file in /etc/graylog/server/feature-flag.conf +# or if a file is specified on 'server' command as command line parameter e.g. server -ff feature-flag.config +# +# Java System Properties: +# To add feature flags as java system properties the key of the property requires the following prefix 'graylog.feature.', +# otherwise it is not recognized as a feature flag. If a feature flag was added as follows '-Dgraylog.feature.feature1=on' it +# can be accessed with 'feature1'/'FEATURE1', the prefix is not needed. This would override a feature flag in this file which is +# specified as follows 'feature1=on'/'FEATURE1=on' +# +# Environment Variables: +# To add feature flags as environment variable the key of the environment variable requires the following prefix 'GRAYLOG_FEATURE_', +# otherwise it is not recognized as a feature flag. If a feature flag is added as follows 'GRAYLOG_FEATURE_FEATURE1=on" it +# can be accessed with 'feature1'/'FEATURE1', the prefix is not needed. This would override a feature flag in this file which is +# specified as follows 'feature1=on'/'FEATURE1=on' +# +# After adding a feature flag it can be accessed over the FeatureFlags interface. +# The FeatureFlags interface is a protected field in the CmdLineTool class so all subclasses have access to the flags. +# It can also be injected after the dependency injection phase. +# +# All feature flags should be entered and documented in this file. If a feature is irreversible (e.g. data migration) +# then document the feature flag well enough. + +# Legacy aggregation wizard flag: +# This flag enable the usage of the legacy AggregationControls in place of the new AggregationWizard. +# It can be enabled with 'legacy-aggregation-wizard=on', the flag is disabled by default. + +# Enabling search filters per default now for everybody diff --git a/data-node/src/main/resources/version.properties b/data-node/src/main/resources/version.properties new file mode 100644 index 000000000000..afba2c57236c --- /dev/null +++ b/data-node/src/main/resources/version.properties @@ -0,0 +1 @@ +project.version=${project.version} diff --git a/data-node/src/test/java/org/graylog/datanode/ConfigurationDocumentationTest.java b/data-node/src/test/java/org/graylog/datanode/ConfigurationDocumentationTest.java new file mode 100644 index 000000000000..3b63f09e7c48 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/ConfigurationDocumentationTest.java @@ -0,0 +1,127 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode; + +import com.github.joschi.jadconfig.Parameter; +import com.github.joschi.jadconfig.ReflectionUtils; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; +import org.apache.commons.lang3.ClassUtils; +import org.bson.assertions.Assertions; +import org.graylog.datanode.commands.Datanode; +import org.graylog2.configuration.Documentation; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.StringWriter; +import java.lang.reflect.Field; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +class ConfigurationDocumentationTest { + + @Test + void testAllFieldsAreDocumented() { + final List datanodeConfiguration = new Datanode().getNodeCommandConfigurationBeans(); + final List undocumentedFields = datanodeConfiguration.stream().flatMap(configurationBean -> { + return Arrays.stream(configurationBean.getClass().getDeclaredFields()) + .filter(f -> f.isAnnotationPresent(Parameter.class)) + .filter(f -> !f.isAnnotationPresent(Documentation.class)); + }).toList(); + + + if (!undocumentedFields.isEmpty()) { + final String fields = undocumentedFields.stream() + .map(Field::toString) + .collect(Collectors.joining("\n")); + Assertions.fail("Following datanode configuration fields require @Documentation annotation: \n" + fields); + } + } + + /** + * When started, this will output to STDOUT the CSV table of datanode's configuration documentation. + */ + public static void main(String[] args) throws IOException { + final StringWriter stringWriter = new StringWriter(); + try (CSVPrinter printer = new CSVPrinter(stringWriter, CSVFormat.EXCEL)) { + + printer.printRecord("Parameter", "Type", "Required", "Default value", "Description"); + + final List datanodeConfiguration = new Datanode().getNodeCommandConfigurationBeans(); + + datanodeConfiguration.forEach(configurationBean -> { + Arrays.stream(configurationBean.getClass().getDeclaredFields()) + .filter(f -> f.isAnnotationPresent(Parameter.class)) + .filter(ConfigurationDocumentationTest::isPublicFacing) + .map(f -> toConfigurationField(f, configurationBean)) + .forEach(f -> { + try { + printer.printRecord(f.configName(), f.type(), f.required(), f.defaultValue(), f.documentation()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + }); + System.out.println(stringWriter); + } + } + + private static boolean isPublicFacing(Field f) { + return !f.isAnnotationPresent(Documentation.class) || f.getAnnotation(Documentation.class).visible(); + } + + private static ConfigurationField toConfigurationField(Field f, Object instance) { + + final String documentation = Optional.ofNullable(f.getAnnotation(Documentation.class)) + .map(Documentation::value).orElse(null); + + final Parameter parameter = f.getAnnotation(Parameter.class); + final String propertyName = parameter.value(); + final Object defaultValue = getDefaultValue(f, instance); + + final String type = getType(f); + + final boolean required = parameter.required(); + + return new ConfigurationField(f.getName(), type, propertyName, defaultValue, required, documentation); + } + + private static Object getDefaultValue(Field f, Object instance) { + final Object defaultValue; + try { + defaultValue = ReflectionUtils.getFieldValue(instance, f); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + return defaultValue; + } + + private static String getType(Field f) { + if (f.getType().isPrimitive()) { + return 
ClassUtils.primitiveToWrapper(f.getType()).getSimpleName(); + } else { + return f.getType().getSimpleName(); + } + } + + + private record ConfigurationField(String fieldName, String type, String configName, Object defaultValue, + boolean required, String documentation) { + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/DataNodeCertRenewalPeriodicalTest.java b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/DataNodeCertRenewalPeriodicalTest.java new file mode 100644 index 000000000000..76b3b69cc55a --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/DataNodeCertRenewalPeriodicalTest.java @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.google.common.eventbus.EventBus; +import jakarta.annotation.Nonnull; +import org.assertj.core.api.Assertions; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.opensearch.CsrRequester; +import org.graylog.security.certutil.CertRequest; +import org.graylog.security.certutil.CertificateGenerator; +import org.graylog.security.certutil.KeyPair; +import org.graylog.security.certutil.cert.CertificateChain; +import org.graylog.security.certutil.csr.CsrSigner; +import org.graylog2.plugin.certificates.RenewalPolicy; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.mockito.Mockito; + +import java.nio.file.Path; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.List; +import java.util.function.Supplier; + +class DataNodeCertRenewalPeriodicalTest { + + + private Path tempDir; + + @BeforeEach + void setUp(@TempDir Path tempDir) { + this.tempDir = tempDir; + } + + @Test + void testAlreadyExpired() throws Exception { + final DatanodeKeystore datanodeKeystore = datanodeKeystore(Duration.ofNanos(1)); + final CsrRequester csrRequester = Mockito.mock(CsrRequester.class); + final DataNodeCertRenewalPeriodical periodical = new DataNodeCertRenewalPeriodical( + datanodeKeystore, autoRenewalPolicy("PT1M"), + csrRequester, + () -> false + ); + periodical.doRun(); + Mockito.verify(csrRequester, Mockito.times(1)).triggerCertificateSigningRequest(); + } + + + @Test + void testExpiringSoon() throws Exception { + final DatanodeKeystore datanodeKeystore = datanodeKeystore(Duration.ofMinutes(1)); + final CsrRequester csrRequester = Mockito.mock(CsrRequester.class); + final DataNodeCertRenewalPeriodical periodical = new DataNodeCertRenewalPeriodical(datanodeKeystore, autoRenewalPolicy("PT1M"), csrRequester, () -> false); + periodical.doRun(); + Mockito.verify(csrRequester, Mockito.times(1)).triggerCertificateSigningRequest(); + } + + + @Test + void testExpiringInFarFuture() throws Exception { + final 
DatanodeKeystore datanodeKeystore = datanodeKeystore(Duration.ofDays(30)); + final CsrRequester csrRequester = Mockito.mock(CsrRequester.class); + final DataNodeCertRenewalPeriodical periodical = new DataNodeCertRenewalPeriodical(datanodeKeystore, autoRenewalPolicy("P3M"), csrRequester, () -> false); + periodical.doRun(); + Mockito.verify(csrRequester, Mockito.never()).triggerCertificateSigningRequest(); + } + + @Nonnull + private static Supplier autoRenewalPolicy(String duration) { + return () -> new RenewalPolicy(RenewalPolicy.Mode.AUTOMATIC, duration); + } + + + private DatanodeKeystore datanodeKeystore(Duration certValidity) throws Exception { + final DatanodeKeystore datanodeKeystore = new DatanodeKeystore(new DatanodeDirectories(tempDir, tempDir, tempDir, tempDir), "foobar", new EventBus()); + datanodeKeystore.create(generateKeyPair(certValidity)); + + final PKCS10CertificationRequest csr = datanodeKeystore.createCertificateSigningRequest("my-hostname", List.of("second-hostname")); + Assertions.assertThat(csr.getSubject().toString()).isEqualTo("CN=my-hostname"); + + final CsrSigner signer = new CsrSigner(); + final KeyPair ca = CertificateGenerator.generate(CertRequest.selfSigned("Graylog CA").isCA(true).validity(Duration.ofDays(365))); + final X509Certificate datanodeCert = signer.sign(ca.privateKey(), ca.certificate(), csr, (int) certValidity.toDays()); + final CertificateChain certChain = new CertificateChain(datanodeCert, List.of(ca.certificate())); + + datanodeKeystore.replaceCertificatesInKeystore(certChain); + + return datanodeKeystore; + } + + + private KeyPair generateKeyPair(Duration validity) throws Exception { + final CertRequest certRequest = CertRequest.selfSigned(DatanodeKeystore.DATANODE_KEY_ALIAS) + .isCA(false) + .validity(validity); + return CertificateGenerator.generate(certRequest); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/DatanodeDirectoriesLockfileCheckTest.java b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/DatanodeDirectoriesLockfileCheckTest.java new file mode 100644 index 000000000000..cb143481d0f6 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/DatanodeDirectoriesLockfileCheckTest.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.bootstrap.preflight; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.configuration.DatanodeDirectories; +import org.graylog2.bootstrap.preflight.PreflightCheck; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +class DatanodeDirectoriesLockfileCheckTest { + + public static final String VALID_NODE_ID = "5ca1ab1e-0000-4000-a000-000000000000"; + public static final String OTHER_NODE_ID = "5ca1ab1e-0000-4000-a000-000000000001"; + + @Test + void testLockCreation(@TempDir Path dataDir, + @TempDir Path logsDir, + @TempDir Path configDir) throws IOException { + + final Path logsDirLock = logsDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE); + final Path configDirLock = configDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE); + + final PreflightCheck check = new DatanodeDirectoriesLockfileCheck(VALID_NODE_ID, new DatanodeDirectories(dataDir, logsDir, null, configDir)); + check.runCheck(); + + Assertions.assertThat(Files.readString(logsDirLock)).isEqualTo(VALID_NODE_ID); + Assertions.assertThat(Files.readString(configDirLock)).isEqualTo(VALID_NODE_ID); + } + + + @Test + void testValidExistingLock(@TempDir Path dataDir, + @TempDir Path logsDir, + @TempDir Path configDir) throws IOException { + + final Path logsDirLock = logsDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE); + final Path configDirLock = configDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE); + + Files.writeString(logsDirLock, VALID_NODE_ID); + Files.writeString(configDirLock, VALID_NODE_ID); + + final PreflightCheck check = new DatanodeDirectoriesLockfileCheck(VALID_NODE_ID, new DatanodeDirectories(dataDir, logsDir, null, configDir)); + check.runCheck(); + + Assertions.assertThat(Files.readString(logsDirLock)).isEqualTo(VALID_NODE_ID); + Assertions.assertThat(Files.readString(configDirLock)).isEqualTo(VALID_NODE_ID); + } + + @Test + void testInvalidExistingLock(@TempDir Path dataDir, + @TempDir Path logsDir, + @TempDir Path configDir) throws IOException { + + final Path logsDirLock = logsDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE); + final Path configDirLock = configDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE); + + Files.writeString(logsDirLock, OTHER_NODE_ID); + Files.writeString(configDirLock, OTHER_NODE_ID); + + final PreflightCheck check = new DatanodeDirectoriesLockfileCheck(VALID_NODE_ID, new DatanodeDirectories(dataDir, logsDir, null, configDir)); + + Assertions.assertThatThrownBy(check::runCheck) + .isInstanceOf(DatanodeLockFileException.class) + .hasMessageContaining("locked for datanode 5ca1ab1e-0000-4000-a000-000000000001, access with datanode 5ca1ab1e-0000-4000-a000-000000000000 rejected"); + } + +} diff --git a/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/LegacyDatanodeKeystoreProviderTest.java b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/LegacyDatanodeKeystoreProviderTest.java new file mode 100644 index 000000000000..275c7bd7ad6d --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/LegacyDatanodeKeystoreProviderTest.java @@ -0,0 +1,160 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.UpdateOptions; +import jakarta.annotation.Nonnull; +import org.assertj.core.api.Assertions; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.bson.Document; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.security.certutil.CertRequest; +import org.graylog.security.certutil.CertificateGenerator; +import org.graylog.security.certutil.KeyPair; +import org.graylog.security.certutil.ca.exceptions.KeyStoreStorageException; +import org.graylog.security.certutil.cert.CertificateChain; +import org.graylog.security.certutil.csr.CsrGenerator; +import org.graylog.security.certutil.csr.CsrSigner; +import org.graylog.security.certutil.csr.InMemoryKeystoreInformation; +import org.graylog.security.certutil.csr.exceptions.CSRGenerationException; +import org.graylog.testing.mongodb.MongoDBInstance; +import org.graylog2.database.MongoConnection; +import org.graylog2.plugin.system.SimpleNodeId; +import org.graylog2.security.encryption.EncryptedValue; +import org.graylog2.security.encryption.EncryptedValueService; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import java.io.ByteArrayOutputStream; +import java.security.Key; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Base64; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import static com.mongodb.client.model.Filters.eq; +import static com.mongodb.client.model.Updates.combine; +import static com.mongodb.client.model.Updates.set; +import static org.graylog.datanode.configuration.DatanodeKeystore.DATANODE_KEY_ALIAS; + +class LegacyDatanodeKeystoreProviderTest { + public final MongoDBInstance mongodb = MongoDBInstance.createForClass(); + + @BeforeEach + void setUp() { + mongodb.start(); + } + + @AfterEach + void tearDown() { + mongodb.close(); + } + + @Test + void testReadLegacyKeystore() throws Exception { + final MongoConnection mongoConnection = mongodb.mongoConnection(); + + final String passwordSecret = "this_is_my_secret_password"; + final SimpleNodeId nodeId = new SimpleNodeId("5ca1ab1e-0000-4000-a000-000000000000"); + final EncryptedValueService encryptedValueService = new EncryptedValueService(passwordSecret); + + final KeyStore keystore = createSignedKeystore(passwordSecret); + final String keystoreStringRepresentation = keystoreToBase64(keystore, passwordSecret.toCharArray()); + writeCertToMongo(mongoConnection.getMongoDatabase(), nodeId, keystoreStringRepresentation, encryptedValueService); + + final LegacyDatanodeKeystoreProvider legacyDatanodeKeystoreProvider = new LegacyDatanodeKeystoreProvider(nodeId, passwordSecret, Mockito.mock(DatanodeConfiguration.class), mongoConnection, 
encryptedValueService); + + final Optional legacyKeystore = legacyDatanodeKeystoreProvider.get(); + Assertions.assertThat(legacyKeystore) + .isPresent() + .hasValueSatisfying(keyStore -> { + try { + Assertions.assertThat(keyStore.getKey("datanode", passwordSecret.toCharArray())).isNotNull(); + Assertions.assertThat(keyStore.getCertificateChain("datanode")).isNotNull().hasSize(2); + } catch (KeyStoreException | NoSuchAlgorithmException | UnrecoverableKeyException e) { + throw new RuntimeException(e); + } + }); + } + + private static void writeCertToMongo(MongoDatabase mongoDatabase, SimpleNodeId nodeId, String keystoreStringRepresentation, EncryptedValueService encryptionService) { + MongoCollection dbCollection = mongoDatabase.getCollection(LegacyDatanodeKeystoreProvider.LEGACY_COLLECTION_NAME); + final EncryptedValue encrypted = encryptionService.encrypt(keystoreStringRepresentation); + dbCollection.updateOne( + eq("node_id", nodeId.getNodeId()), + combine( + set("node_id", nodeId.getNodeId()), + set(LegacyDatanodeKeystoreProvider.ENCRYPTED_CERTIFICATE_FIELD + ".encrypted_value", encrypted.value()), + set(LegacyDatanodeKeystoreProvider.ENCRYPTED_CERTIFICATE_FIELD + ".salt", encrypted.salt()) + ), + new UpdateOptions().upsert(true) + ); + } + + @Nonnull + private static KeyStore createSignedKeystore(String passwordSecret) throws Exception { + final KeyPair keyPair = generateKeyPair(); + final KeyStore keystore = keyPair.toKeystore("datanode", passwordSecret.toCharArray()); + final CertificateChain signed = singCertChain(keystore, passwordSecret); + + Key privateKey = keystore.getKey(DATANODE_KEY_ALIAS, passwordSecret.toCharArray()); + // replace the existing self-signed certificates chain with the signed chain from the event + keystore.setKeyEntry(DATANODE_KEY_ALIAS, privateKey, passwordSecret.toCharArray(), signed.toCertificateChainArray()); + return keystore; + } + + private static CertificateChain singCertChain(KeyStore keystore, String passwordSecret) throws Exception { + final PKCS10CertificationRequest csr = csr(keystore, passwordSecret); + final CsrSigner signer = new CsrSigner(); + final KeyPair ca = CertificateGenerator.generate(CertRequest.selfSigned("Graylog CA").isCA(true).validity(Duration.ofDays(365))); + final X509Certificate datanodeCert = signer.sign(ca.privateKey(), ca.certificate(), csr, 30); + final CertificateChain certChain = new CertificateChain(datanodeCert, List.of(ca.certificate())); + return certChain; + } + + private static PKCS10CertificationRequest csr(KeyStore keystore, String passwordSecret) throws CSRGenerationException { + final InMemoryKeystoreInformation keystoreInformation = new InMemoryKeystoreInformation(keystore, passwordSecret.toCharArray()); + return CsrGenerator.generateCSR(keystoreInformation, DATANODE_KEY_ALIAS, "my-hostname", Collections.emptyList()); + } + + @Nonnull + private static KeyPair generateKeyPair() throws Exception { + final CertRequest certRequest = CertRequest.selfSigned(DATANODE_KEY_ALIAS) + .isCA(false) + .validity(Duration.ofDays(31)); + return CertificateGenerator.generate(certRequest); + } + + private static String keystoreToBase64(final KeyStore keyStore, char[] keystorePassword) throws KeyStoreStorageException { + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + keyStore.store(baos, keystorePassword); + return Base64.getEncoder().encodeToString(baos.toByteArray()); + } catch (Exception ex) { + throw new KeyStoreStorageException("Failed to save keystore to Mongo collection for node ", ex); + } + } +} diff 
--git a/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/OpensearchBinPreflightCheckTest.java b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/OpensearchBinPreflightCheckTest.java new file mode 100644 index 000000000000..0c6fddf1f523 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/bootstrap/preflight/OpensearchBinPreflightCheckTest.java @@ -0,0 +1,100 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.bootstrap.preflight; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.OpensearchDistribution; +import org.graylog2.bootstrap.preflight.PreflightCheckException; +import org.graylog2.shared.utilities.StringUtils; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Collections; + +public class OpensearchBinPreflightCheckTest { + + @TempDir + private Path tempDir; + + @Test + void testNonexistentDirectory() { + final Path baseDirectory = tempDir.resolve("nonexistent"); + + final OpensearchDistribution dist = new OpensearchDistribution(baseDirectory, "2.10.0"); + + final OpensearchBinPreflightCheck check = new OpensearchBinPreflightCheck(() -> dist); + Assertions.assertThatThrownBy(check::runCheck) + .isInstanceOf(PreflightCheckException.class) + .hasMessage("Opensearch base directory %s doesn't exist!", baseDirectory.toAbsolutePath()); + } + + + @Test + void testNonexistentBinary() throws IOException { + final Path baseDir = tempDir.resolve("opensearch"); + final Path binDir = baseDir.resolve("bin"); + Files.createDirectories(binDir); + + // nonexistent! 
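+ // only the bin directory is created here; the opensearch binary file itself is deliberately left missing, so the preflight check below is expected to fail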
+ final Path executable = binDir.resolve("opensearch"); + + final OpensearchDistribution dist = new OpensearchDistribution(baseDir, "2.10.0"); + final OpensearchBinPreflightCheck check = new OpensearchBinPreflightCheck(() -> dist); + + Assertions.assertThatThrownBy(check::runCheck) + .isInstanceOf(PreflightCheckException.class) + .hasMessage("Opensearch binary %s doesn't exist!", executable.toAbsolutePath()); + } + + @Test + void testBinaryWithoutExecPermission() throws IOException { + final Path baseDir = tempDir.resolve("opensearch"); + final Path binDir = baseDir.resolve("bin"); + Files.createDirectories(binDir); + final Path executable = binDir.resolve("opensearch"); + Files.createFile(executable); + + final OpensearchDistribution dist = new OpensearchDistribution(baseDir, "2.10.0"); + + final OpensearchBinPreflightCheck check = new OpensearchBinPreflightCheck(() -> dist); + Assertions.assertThatThrownBy(check::runCheck) + .isInstanceOf(PreflightCheckException.class) + .hasMessageStartingWith(StringUtils.f("Opensearch binary %s is not executable!", executable.toAbsolutePath())); + } + + + @Test + void testBinaryOk() throws IOException { + final Path baseDir = tempDir.resolve("opensearch"); + final Path binDir = baseDir.resolve("bin"); + Files.createDirectories(binDir); + final Path executable = binDir.resolve("opensearch"); + Files.createFile(executable); + Files.setPosixFilePermissions(executable, Collections.singleton(PosixFilePermission.OWNER_EXECUTE)); + + final OpensearchDistribution dist = new OpensearchDistribution(baseDir, "2.10.0"); + + final OpensearchBinPreflightCheck check = new OpensearchBinPreflightCheck(() -> dist); + Assertions.assertThatCode(check::runCheck) + .doesNotThrowAnyException(); + } +} + diff --git a/data-node/src/test/java/org/graylog/datanode/configuration/DatanodeDirectoriesTest.java b/data-node/src/test/java/org/graylog/datanode/configuration/DatanodeDirectoriesTest.java new file mode 100644 index 000000000000..d39f0a9794ac --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/configuration/DatanodeDirectoriesTest.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration; + +import org.assertj.core.api.Assertions; +import org.graylog2.plugin.system.SimpleNodeId; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.PosixFilePermission; + +class DatanodeDirectoriesTest { + + @Test + void testConfigDirPermissions(@TempDir Path dataDir, @TempDir Path logsDir, @TempDir Path configSourceDir, @TempDir Path configTargetDir) throws IOException { + final DatanodeDirectories datanodeDirectories = new DatanodeDirectories(dataDir, logsDir, configSourceDir, configTargetDir); + final OpensearchConfigurationDir dir = datanodeDirectories.createUniqueOpensearchProcessConfigurationDir(); + + Assertions.assertThat(Files.getPosixFilePermissions(dir.configurationRoot())). + contains( + PosixFilePermission.OWNER_EXECUTE, + PosixFilePermission.OWNER_WRITE, + PosixFilePermission.OWNER_READ + ); + + final Path keyFile = dir.createOpensearchProcessConfigurationFile(Path.of("my-secret-file.key")); + Assertions.assertThat(Files.getPosixFilePermissions(keyFile)). + contains( + PosixFilePermission.OWNER_WRITE, + PosixFilePermission.OWNER_READ + ); + } + + /** + * Remove in 6.0 together with the backwards compatibility of datanode dirs + */ + @Deprecated(forRemoval = true) + @SuppressWarnings("removal") + @Test + void testBackwardsCompatibility(@TempDir Path tempDir) throws IOException { + final Path withoutSubdir = DatanodeDirectories.backwardsCompatible(tempDir, new SimpleNodeId("5ca1ab1e-0000-4000-a000-000000000000"), "my_config_property"); + Assertions.assertThat(withoutSubdir).isEqualTo(tempDir); + + Files.createDirectories(tempDir.resolve("5ca1ab1e-0000-4000-a000-000000000000")); + + final Path withSubdir = DatanodeDirectories.backwardsCompatible(tempDir, new SimpleNodeId("5ca1ab1e-0000-4000-a000-000000000000"), "my_config_property"); + + Assertions.assertThat(withSubdir) + .startsWith(tempDir) + .endsWith(Path.of("5ca1ab1e-0000-4000-a000-000000000000")); + + + } + +} diff --git a/data-node/src/test/java/org/graylog/datanode/configuration/DatanodeKeystoreTest.java b/data-node/src/test/java/org/graylog/datanode/configuration/DatanodeKeystoreTest.java new file mode 100644 index 000000000000..0122176ada84 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/configuration/DatanodeKeystoreTest.java @@ -0,0 +1,131 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.configuration; + +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import jakarta.annotation.Nonnull; +import org.assertj.core.api.Assertions; +import org.bouncycastle.openssl.PEMEncryptor; +import org.bouncycastle.openssl.jcajce.JcaMiscPEMGenerator; +import org.bouncycastle.openssl.jcajce.JcaPEMWriter; +import org.bouncycastle.openssl.jcajce.JcePEMEncryptorBuilder; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.graylog.security.certutil.CertRequest; +import org.graylog.security.certutil.CertificateGenerator; +import org.graylog.security.certutil.KeyPair; +import org.graylog.security.certutil.cert.CertificateChain; +import org.graylog.security.certutil.csr.CsrSigner; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.FileWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.LinkedList; +import java.util.List; + +class DatanodeKeystoreTest { + + private EventBus eventBus; + private final List receivedEvents = new LinkedList<>(); + + @BeforeEach + void setUp() { + eventBus = new EventBus(); + eventBus.register(this); + } + + @AfterEach + void tearDown() { + eventBus.unregister(this); + } + + @Subscribe + public void subscribe(DatanodeKeystoreChangedEvent event) { + // remember received events so we can verify them later + receivedEvents.add(event); + } + + @Test + void testCreateRead(@TempDir Path tempDir) throws Exception { + final DatanodeKeystore datanodeKeystore = new DatanodeKeystore(new DatanodeDirectories(tempDir, tempDir, tempDir, tempDir), "foobar", this.eventBus); + Assertions.assertThat(datanodeKeystore.exists()).isFalse(); + + final KeyPair keyPair = generateKeyPair(); + + datanodeKeystore.create(keyPair); + Assertions.assertThat(datanodeKeystore.exists()).isTrue(); + + Assertions.assertThat(datanodeKeystore.hasSignedCertificate()).isFalse(); + final PKCS10CertificationRequest csr = datanodeKeystore.createCertificateSigningRequest("my-hostname", List.of("second-hostname")); + Assertions.assertThat(csr.getSubject().toString()).isEqualTo("CN=my-hostname"); + + final CsrSigner signer = new CsrSigner(); + final KeyPair ca = CertificateGenerator.generate(CertRequest.selfSigned("Graylog CA").isCA(true).validity(Duration.ofDays(365))); + final X509Certificate datanodeCert = signer.sign(ca.privateKey(), ca.certificate(), csr, 30); + final CertificateChain certChain = new CertificateChain(datanodeCert, List.of(ca.certificate())); + + datanodeKeystore.replaceCertificatesInKeystore(certChain); + + Assertions.assertThat(this.receivedEvents).hasSize(1); + + Assertions.assertThat(datanodeKeystore.hasSignedCertificate()).isTrue(); + } + + @Test + void testIntermediateCA(@TempDir Path tempDir) throws Exception { + + final DatanodeKeystore datanodeKeystore = new DatanodeKeystore(new DatanodeDirectories(tempDir, tempDir, tempDir, tempDir), "foobar", this.eventBus); + datanodeKeystore.create(generateKeyPair()); + + + final KeyPair rootCa = CertificateGenerator.generate(CertRequest.selfSigned("root") + .isCA(true) + .validity(Duration.ofDays(365))); + + final KeyPair intermediate = CertificateGenerator.generate(CertRequest.signed("intermediate", rootCa) + .isCA(true) + .validity(Duration.ofDays(365))); + + final KeyPair server = 
CertificateGenerator.generate(CertRequest.signed("server", intermediate) + .isCA(true) + .validity(Duration.ofDays(365))); + + final PKCS10CertificationRequest csr = datanodeKeystore.createCertificateSigningRequest("my-hostname", List.of("second-hostname")); + + final CsrSigner signer = new CsrSigner(); + final X509Certificate datanodeCert = signer.sign(server.privateKey(), server.certificate(), csr, 30); + final CertificateChain certChain = new CertificateChain(datanodeCert, List.of(server.certificate(), intermediate.certificate(), rootCa.certificate())); + + datanodeKeystore.replaceCertificatesInKeystore(certChain); + + Assertions.assertThat(datanodeKeystore.hasSignedCertificate()).isTrue(); + } + + @Nonnull + private static KeyPair generateKeyPair() throws Exception { + final CertRequest certRequest = CertRequest.selfSigned(DatanodeKeystore.DATANODE_KEY_ALIAS) + .isCA(false) + .validity(Duration.ofDays(31)); + return CertificateGenerator.generate(certRequest); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/configuration/OpensearchArchitectureTest.java b/data-node/src/test/java/org/graylog/datanode/configuration/OpensearchArchitectureTest.java new file mode 100644 index 000000000000..ab3fd2f82eb8 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/configuration/OpensearchArchitectureTest.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class OpensearchArchitectureTest { + + @Test + void nameKnown() { + final OpensearchArchitecture amd64 = OpensearchArchitecture.fromCode("amd64"); + final OpensearchArchitecture x8664 = OpensearchArchitecture.fromCode("x86_64"); + final OpensearchArchitecture x64 = OpensearchArchitecture.fromCode("x64"); + Assertions.assertThat(amd64).isEqualTo(OpensearchArchitecture.x64); + Assertions.assertThat(x8664).isEqualTo(OpensearchArchitecture.x64); + Assertions.assertThat(x64).isEqualTo(OpensearchArchitecture.x64); + } + + @Test + void fromCodeUnknown() { + Assertions.assertThatThrownBy(() -> OpensearchArchitecture.fromCode("nonsense")) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessage("Unsupported OpenSearch distribution architecture: nonsense"); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/configuration/OpensearchDistributionProviderTest.java b/data-node/src/test/java/org/graylog/datanode/configuration/OpensearchDistributionProviderTest.java new file mode 100644 index 000000000000..eff234e99857 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/configuration/OpensearchDistributionProviderTest.java @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.OpensearchDistribution; +import jakarta.annotation.Nonnull; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +class OpensearchDistributionProviderTest { + + @TempDir + private Path tempDir; + + @TempDir + private Path emptyTempDir; + + @TempDir + private Path tempDirWithoutArch; + + @BeforeEach + void setUp() throws IOException { + Files.createDirectory(tempDir.resolve("opensearch-2.5.0-linux-x64")); + Files.createDirectory(tempDir.resolve("opensearch-2.5.0-linux-aarch64")); + Files.createDirectory(tempDir.resolve("somethingelse")); // just to include something which is not OS dist + + Files.createDirectory(tempDirWithoutArch.resolve("opensearch-2.4.1")); + Files.createDirectory(tempDir.resolve(".config")); // just to include something which is not OS dist + } + + @Test + void testFailedDetectionInDirectory() { + Assertions.assertThatThrownBy(() -> + provider(emptyTempDir.resolve("nonexistent"), OpensearchArchitecture.x64).get()) + .hasMessageStartingWith("Failed to list content of provided directory"); + + + Assertions.assertThatThrownBy(() -> provider(emptyTempDir, OpensearchArchitecture.x64).get()) + .hasMessageStartingWith("Could not detect any opensearch distribution"); + } + + @Test + void testDetection() { + final OpensearchDistribution x64 = provider(tempDir, OpensearchArchitecture.x64).get(); + Assertions.assertThat(x64.version()).isEqualTo("2.5.0"); + Assertions.assertThat(x64.platform()).isEqualTo("linux"); + Assertions.assertThat(x64.architecture()).isEqualTo(OpensearchArchitecture.x64); + + final OpensearchDistribution mac = provider(tempDir, OpensearchArchitecture.x64).get(); + Assertions.assertThat(mac.version()).isEqualTo("2.5.0"); + Assertions.assertThat(mac.platform()).isEqualTo("linux"); + Assertions.assertThat(mac.architecture()).isEqualTo(OpensearchArchitecture.x64); + + final OpensearchDistribution aarch64 = provider(tempDir, OpensearchArchitecture.aarch64).get(); + Assertions.assertThat(aarch64.version()).isEqualTo("2.5.0"); + Assertions.assertThat(aarch64.platform()).isEqualTo("linux"); + Assertions.assertThat(aarch64.architecture()).isEqualTo(OpensearchArchitecture.aarch64); + } + + @Test + void testDetectionWithoutArch() { + final OpensearchDistribution dist = provider(tempDirWithoutArch, OpensearchArchitecture.x64).get(); + Assertions.assertThat(dist.version()).isEqualTo("2.4.1"); + Assertions.assertThat(dist.architecture()).isNull(); + Assertions.assertThat(dist.platform()).isNull(); + } + + @Test + void testBackwardsCompatibility() { + // we are not pointing to the root directory which should contain different OS distributions but rather directly to one + // specific distribution. 
+ final OpensearchDistribution dist = provider(tempDir.resolve("opensearch-2.5.0-linux-x64"), OpensearchArchitecture.x64).get(); + Assertions.assertThat(dist.version()).isEqualTo("2.5.0"); + Assertions.assertThat(dist.platform()).isEqualTo("linux"); + Assertions.assertThat(dist.architecture()).isEqualTo(OpensearchArchitecture.x64); + } + + @Nonnull + private OpensearchDistributionProvider provider(Path dir, OpensearchArchitecture arch) { + return new OpensearchDistributionProvider(dir, arch); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/configuration/S3RepositoryConfigurationTest.java b/data-node/src/test/java/org/graylog/datanode/configuration/S3RepositoryConfigurationTest.java new file mode 100644 index 000000000000..1e9b1bc078bb --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/configuration/S3RepositoryConfigurationTest.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.configuration; + +import com.github.joschi.jadconfig.JadConfig; +import com.github.joschi.jadconfig.RepositoryException; +import com.github.joschi.jadconfig.ValidationException; +import com.github.joschi.jadconfig.repositories.InMemoryRepository; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Map; + +class S3RepositoryConfigurationTest { + + @Test + void testFullConfigurationEnabled() throws ValidationException, RepositoryException { + final S3RepositoryConfiguration config = initializeConfiguration(Map.of( + "s3_client_default_access_key", "user", + "s3_client_default_secret_key", "password", + "s3_client_default_endpoint", "http://localhost:9000" + + )); + Assertions.assertThat(config.isRepositoryEnabled()).isTrue(); + } + + @Test + void testEmptyConfigurationDisabled() throws ValidationException, RepositoryException { + final S3RepositoryConfiguration config = initializeConfiguration(Map.of()); + Assertions.assertThat(config.isRepositoryEnabled()).isFalse(); + } + + @Test + void testPartialConfigurationException() throws ValidationException, RepositoryException { + final S3RepositoryConfiguration config = initializeConfiguration(Map.of( + "s3_client_default_access_key", "user", + "s3_client_default_secret_key", "password" + + )); + Assertions.assertThatThrownBy(config::isRepositoryEnabled) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("S3 Client not configured properly"); + + } + + private S3RepositoryConfiguration initializeConfiguration(Map properties) throws RepositoryException, ValidationException { + final S3RepositoryConfiguration configuration = new S3RepositoryConfiguration(); + new JadConfig(new InMemoryRepository(properties), configuration).process(); + return configuration; + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/filesystem/index/IndicesDirectoryParserTest.java b/data-node/src/test/java/org/graylog/datanode/filesystem/index/IndicesDirectoryParserTest.java new file mode 100644 index 
000000000000..7efc79a2ed83 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/filesystem/index/IndicesDirectoryParserTest.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.filesystem.index; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.filesystem.index.indexreader.ShardStatsParserImpl; +import org.graylog.datanode.filesystem.index.statefile.StateFileParserImpl; +import org.graylog.datanode.filesystem.index.dto.IndexInformation; +import org.graylog.datanode.filesystem.index.dto.IndexerDirectoryInformation; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Path; + +class IndicesDirectoryParserTest { + + + private IndicesDirectoryParser parser; + + @BeforeEach + void setUp() { + parser = new IndicesDirectoryParser(new StateFileParserImpl(), new ShardStatsParserImpl()); + } + + @Test + void testOpensearch2() throws URISyntaxException { + final URI uri = getClass().getResource("/indices/opensearch2").toURI(); + final IndexerDirectoryInformation result = parser.parse(Path.of(uri)); + Assertions.assertThat(result.nodes()) + .hasSize(1) + .allSatisfy(node -> { + Assertions.assertThat(node.nodeVersion()).isEqualTo("2.10.0"); + Assertions.assertThat(node.indices()) + .hasSize(6) + .extracting(IndexInformation::indexName) + .contains(".opensearch-sap-log-types-config", ".plugins-ml-config", "graylog_0", ".opensearch-observability", ".opendistro_security", "security-auditlog-2023.11.24"); + + final IndexInformation graylog_0 = node.indices().stream().filter(i -> i.indexName().equals("graylog_0")).findFirst().orElseThrow(() -> new RuntimeException("Failed to detect graylog_0 index")); + + Assertions.assertThat(graylog_0.indexVersionCreated()).isEqualTo("2.10.0"); + + Assertions.assertThat(graylog_0.shards()) + .hasSize(1) + .allSatisfy(shard -> { + Assertions.assertThat(shard.documentsCount()).isEqualTo(1); + Assertions.assertThat(shard.name()).isEqualTo("S0"); + Assertions.assertThat(shard.primary()).isEqualTo(true); + Assertions.assertThat(shard.minLuceneVersion()).isEqualTo("9.7.0"); + }); + }); + } + + @Test + void testOpensearch1() throws URISyntaxException { + final URI uri = getClass().getResource("/indices/opensearch1").toURI(); + final IndexerDirectoryInformation result = parser.parse(Path.of(uri)); + Assertions.assertThat(result.nodes()) + .hasSize(1) + .allSatisfy(node -> { + Assertions.assertThat(node.nodeVersion()).isEqualTo("1.3.0"); + Assertions.assertThat(node.indices()) + .hasSize(1) + .extracting(IndexInformation::indexName) + .contains("graylog_0"); + + final IndexInformation graylog_0 = node.indices().stream().filter(i -> i.indexName().equals("graylog_0")).findFirst().orElseThrow(() -> new RuntimeException("Failed to detect graylog_0 index")); + + 
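+ // the /indices/opensearch1 test fixture contains a single graylog_0 index written by OpenSearch 1.3.0; the assertions below verify the detected index and Lucene segment versions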
Assertions.assertThat(graylog_0.indexVersionCreated()).isEqualTo("1.3.0"); + + Assertions.assertThat(graylog_0.shards()) + .hasSize(1) + .allSatisfy(shard -> { + Assertions.assertThat(shard.documentsCount()).isEqualTo(1); + Assertions.assertThat(shard.name()).isEqualTo("S0"); + Assertions.assertThat(shard.primary()).isEqualTo(true); + Assertions.assertThat(shard.minLuceneVersion()).isEqualTo("8.10.1"); + }); + }); + } + + + @Test + void testElasticsearch7() throws URISyntaxException { + final URI uri = getClass().getResource("/indices/elasticsearch7").toURI(); + final IndexerDirectoryInformation result = parser.parse(Path.of(uri)); + Assertions.assertThat(result.nodes()) + .hasSize(1) + .allSatisfy(node -> { + Assertions.assertThat(node.nodeVersion()).isEqualTo("7.10.0"); + Assertions.assertThat(node.indices()) + .hasSize(1) + .extracting(IndexInformation::indexName) + .contains("graylog_0"); + + final IndexInformation graylog_0 = node.indices().stream().filter(i -> i.indexName().equals("graylog_0")).findFirst().orElseThrow(() -> new RuntimeException("Failed to detect graylog_0 index")); + + Assertions.assertThat(graylog_0.indexVersionCreated()).isEqualTo("7.10.0"); + + Assertions.assertThat(graylog_0.shards()) + .hasSize(1) + .allSatisfy(shard -> { + Assertions.assertThat(shard.documentsCount()).isEqualTo(1); + Assertions.assertThat(shard.name()).isEqualTo("S0"); + Assertions.assertThat(shard.primary()).isEqualTo(true); + Assertions.assertThat(shard.minLuceneVersion()).isEqualTo("8.7.0"); + }); + }); + } + + @Test + void testElasticsearch6() throws URISyntaxException { + final URI uri = getClass().getResource("/indices/elasticsearch6").toURI(); + Assertions.assertThatThrownBy(()->parser.parse(Path.of(uri))) + .isInstanceOf(IncompatibleIndexVersionException.class); + + } + + @Test + void testEmptyDataDir(@TempDir Path tempDir) { + final IndexerDirectoryInformation result = parser.parse(tempDir); + Assertions.assertThat(result).isNotNull(); + Assertions.assertThat(result.nodes()).isEmpty(); + + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParserTest.java b/data-node/src/test/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParserTest.java new file mode 100644 index 000000000000..1e74ea3a4b9d --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/filesystem/index/indexreader/ShardStatsParserTest.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.filesystem.index.indexreader; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.filesystem.index.IncompatibleIndexVersionException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Path; + +class ShardStatsParserTest { + + private ShardStatsParserImpl shardStatsParser; + + @BeforeEach + void setUp() { + shardStatsParser = new ShardStatsParserImpl(); + } + + @Test + void testOpensearch2() throws URISyntaxException, IncompatibleIndexVersionException { + final URI shard = getClass().getResource("/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0").toURI(); + final ShardStats stats = shardStatsParser.read(Path.of(shard)); + Assertions.assertThat(stats.documentsCount()).isEqualTo(1); + Assertions.assertThat(stats.minSegmentLuceneVersion().toString()).isEqualTo("9.7.0"); + } + + @Test + void testElasticsearch7() throws URISyntaxException, IncompatibleIndexVersionException { + final URI shard = getClass().getResource("/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0").toURI(); + final ShardStats stats = shardStatsParser.read(Path.of(shard)); + Assertions.assertThat(stats.documentsCount()).isEqualTo(1); + Assertions.assertThat(stats.minSegmentLuceneVersion().toString()).isEqualTo("8.7.0"); + } + @Test + void testElasticsearch6() throws URISyntaxException { + final URI shard = getClass().getResource("/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/").toURI(); + Assertions.assertThatThrownBy(() -> shardStatsParser.read(Path.of(shard))) + .isInstanceOf(IncompatibleIndexVersionException.class); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/filesystem/index/statefile/StateFileParserTest.java b/data-node/src/test/java/org/graylog/datanode/filesystem/index/statefile/StateFileParserTest.java new file mode 100644 index 000000000000..45beb21f0ef4 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/filesystem/index/statefile/StateFileParserTest.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.filesystem.index.statefile; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Path; + +class StateFileParserTest { + + private StateFileParser parser; + + @BeforeEach + void setUp() { + this.parser = new StateFileParserImpl(); + } + + @Test + void parseOpensearch2() throws URISyntaxException { + final URI uri = getClass().getResource("/indices/opensearch2/nodes/0/_state/node-1.st").toURI(); + final StateFile stateFile = parser.parse(Path.of(uri)); + Assertions.assertThat(stateFile.document().get("node_id")).isEqualTo("yK5GvmLyRD2nbhAyyJL76w"); + Assertions.assertThat(stateFile.document().get("node_version")).isEqualTo(136317827); + } + + @Test + void parseElasticsearch7() throws URISyntaxException { + final URI uri = getClass().getResource("/indices/elasticsearch7/nodes/0/_state/node-0.st").toURI(); + final StateFile stateFile = parser.parse(Path.of(uri)); + Assertions.assertThat(stateFile.document().get("node_id")).isEqualTo("jy62YbbVQdOyvBoKDQb7mg"); + Assertions.assertThat(stateFile.document().get("node_version")).isEqualTo(7100099); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/initializers/JwtTokenAuthFilterTest.java b/data-node/src/test/java/org/graylog/datanode/initializers/JwtTokenAuthFilterTest.java new file mode 100644 index 000000000000..3675e056e35e --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/initializers/JwtTokenAuthFilterTest.java @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.initializers; + +import com.github.joschi.jadconfig.util.Duration; +import jakarta.annotation.Nonnull; +import jakarta.ws.rs.core.MultivaluedHashMap; +import org.assertj.core.api.Assertions; +import org.glassfish.jersey.server.ContainerRequest; +import org.graylog2.security.IndexerJwtAuthTokenProvider; +import org.graylog2.security.JwtSecret; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.Collections; +import java.util.Optional; + +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; + +class JwtTokenAuthFilterTest { + + private static ContainerRequest mockRequest(String bearerToken) { + final ContainerRequest request = mock(ContainerRequest.class); + final MultivaluedHashMap headers = new MultivaluedHashMap<>(); + Optional.ofNullable(bearerToken).ifPresent(token -> headers.put("Authorization", Collections.singletonList(token))); + Mockito.when(request.getHeaders()).thenReturn(headers); + return request; + } + + @Test + void verifyValidToken() throws IOException { + final String key = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h"; + final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(new JwtSecret(key)); + final ContainerRequest mockedRequest = mockRequest("Bearer " + generateToken(key)); + validator.filter(mockedRequest); + Mockito.verify(mockedRequest, never()).abortWith(Mockito.any()); + } + + @Test + void verifyNoHeaderProvided() throws IOException { + final String key = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h"; + final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(new JwtSecret(key)); + final ContainerRequest mockedRequest = mockRequest(null); + validator.filter(mockedRequest); + Mockito.verify(mockedRequest, atLeastOnce()).abortWith(Mockito.any()); + } + + @Test + void verifyInvalidToken() throws IOException { + final String generationKey = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h"; + final String verificationKey = "n51wcO3jn8w3JNyGgKc7k1fTCr1FWvGg7ODfQOyBT2fizBrCVsRJg2GsbYGLNejfi3QsKaqJgo3zAWMuAZhJznuizHZpv92S"; + final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(new JwtSecret(verificationKey)); + + final ContainerRequest mockedRequest = mockRequest("Bearer " + generateToken(generationKey)); + validator.filter(mockedRequest); + Mockito.verify(mockedRequest, atLeastOnce()).abortWith(Mockito.any()); + } + + @Test + void testNoneAlgorithm() { + final String key = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h"; + final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(new JwtSecret(key)); + Assertions.assertThatThrownBy(() -> validator.verifyToken(removeSignature(generateToken(key)))) + .isInstanceOf(TokenVerificationException.class) + .hasMessageContaining("Token format/configuration is not supported"); + } + + private String removeSignature(String token) { + final String header = Base64.getEncoder() + .encodeToString("{\"alg\": \"none\"}" + .getBytes(StandardCharsets.UTF_8)); + + return header + token.substring(token.indexOf('.'), token.lastIndexOf('.') + 1); + } + + @Nonnull + private static String generateToken(String signingKey) { + return IndexerJwtAuthTokenProvider.createToken(new 
JwtSecret(signingKey), Duration.seconds(180)); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DataNodePluginsIT.java b/data-node/src/test/java/org/graylog/datanode/integration/DataNodePluginsIT.java new file mode 100644 index 000000000000..2a79f4e1dfb9 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DataNodePluginsIT.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.integration; + +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.WaitStrategies; +import org.graylog.datanode.testinfra.DatanodeContainerizedBackend; +import org.graylog.datanode.testinfra.DatanodeTestExtension; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static io.restassured.RestAssured.given; + +@ExtendWith(DatanodeTestExtension.class) +public class DataNodePluginsIT { + private static final Logger LOG = LoggerFactory.getLogger(DataNodePluginsIT.class); + private final DatanodeContainerizedBackend backend; + + public DataNodePluginsIT(DatanodeContainerizedBackend backend) { + this.backend = backend; + } + + @Test + void ensureUnneededPluginsAreNotLoaded() throws Exception { + final var opensearchRestPort = backend.getOpensearchRestPort(); + final var baseUrl = "http://localhost:" + opensearchRestPort; + try { + waitForOpensearch(baseUrl); + + given() + .get(baseUrl + "/_cat/plugins") + .then() + .statusCode(200) + .body( + Matchers.not(Matchers.containsString("opensearch-alerting")), + Matchers.not(Matchers.containsString("opensearch-custom-codecs")), + Matchers.not(Matchers.containsString("opensearch-geospatial")), + Matchers.not(Matchers.containsString("opensearch-knn")), + Matchers.not(Matchers.containsString("opensearch-neural-search")), + Matchers.not(Matchers.containsString("opensearch-notifications")), + Matchers.not(Matchers.containsString("opensearch-notifications-core")), + Matchers.not(Matchers.containsString("opensearch-performance-analyzer")), + Matchers.not(Matchers.containsString("opensearch-reports-scheduler")), + Matchers.not(Matchers.containsString("opensearch-security-analytics")), + Matchers.not(Matchers.containsString("opensearch-sql")) + ); + } catch (Exception exception) { + LOG.error("DataNode Container logs follow:\n" + backend.getLogs()); + throw exception; + } + } + + private void waitForOpensearch(String baseUrl) throws ExecutionException, RetryException { + final var retryer = RetryerBuilder.newBuilder() + .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS)) + .retryIfException(e -> e instanceof IOException) + .build(); + + retryer.call(() -> given() + .get(baseUrl) + .then() + .statusCode(200)); + 
} +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DatanodeClusterIT.java b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeClusterIT.java new file mode 100644 index 000000000000..2c2fa55317b0 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeClusterIT.java @@ -0,0 +1,267 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.integration; + +import com.github.rholder.retry.RetryException; +import jakarta.validation.constraints.NotNull; +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.datanode.testinfra.DatanodeContainerizedBackend; +import org.graylog.security.certutil.csr.FilesystemKeystoreInformation; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.mongodb.MongoDBTestService; +import org.graylog.testing.restoperations.DatanodeOpensearchWait; +import org.graylog.testing.restoperations.DatanodeRestApiWait; +import org.graylog.testing.restoperations.DatanodeStatusChangeOperation; +import org.graylog.testing.restoperations.OpensearchTestIndexCreation; +import org.graylog.testing.restoperations.RestOperationParameters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.Network; + +import java.io.IOException; +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.stream.Stream; + +import static org.graylog.datanode.testinfra.DatanodeContainerizedBackend.IMAGE_WORKING_DIR; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class DatanodeClusterIT { + private static final Logger LOG = LoggerFactory.getLogger(DatanodeClusterIT.class); + + private DatanodeContainerizedBackend nodeA; + private DatanodeContainerizedBackend nodeB; + private DatanodeContainerizedBackend nodeC; + + @TempDir + static Path tempDir; + private String hostnameNodeA; + private KeyStore trustStore; + private FilesystemKeystoreInformation ca; + private Network network; + private MongoDBTestService mongoDBTestService; + + @BeforeEach + void setUp() throws GeneralSecurityException, IOException { + // first generate a self-signed CA + ca = DatanodeSecurityTestUtils.generateCa(tempDir); + + trustStore = DatanodeSecurityTestUtils.buildTruststore(ca); + + hostnameNodeA = "graylog-datanode-host-" + RandomStringUtils.random(8, "0123456789abcdef"); + final FilesystemKeystoreInformation transportNodeA = DatanodeSecurityTestUtils.generateTransportCert(tempDir, ca, hostnameNodeA); + final FilesystemKeystoreInformation httpNodeA = DatanodeSecurityTestUtils.generateHttpCert(tempDir, ca, hostnameNodeA); + + 
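+ // Both nodes share one docker network and one MongoDB instance: the network carries the
+ // OpenSearch transport traffic between the containers, and MongoDB backs the data nodes'
+ // Graylog-side node registration.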
this.network = Network.newNetwork(); + this.mongoDBTestService = MongoDBTestService.create(MongodbServer.DEFAULT_VERSION, network); + this.mongoDBTestService.start(); + + nodeA = createDatanodeContainer( + network, + mongoDBTestService, + hostnameNodeA, + transportNodeA, + httpNodeA + ); + + + final String hostnameNodeB = "graylog-datanode-host-" + RandomStringUtils.random(8, "0123456789abcdef"); + final FilesystemKeystoreInformation transportNodeB = DatanodeSecurityTestUtils.generateTransportCert(tempDir, ca, hostnameNodeB); + final FilesystemKeystoreInformation httpNodeB = DatanodeSecurityTestUtils.generateHttpCert(tempDir, ca, hostnameNodeB); + + nodeB = createDatanodeContainer( + network, + mongoDBTestService, + hostnameNodeB, + transportNodeB, + httpNodeB + ); + + Stream.of(nodeA, nodeB).parallel().forEach(DatanodeContainerizedBackend::start); + } + + @AfterEach + void tearDown() { + if (nodeB != null) { + nodeB.stop(); + } + if (nodeA != null) { + nodeA.stop(); + } + if (nodeC != null) { + nodeC.stop(); + } + mongoDBTestService.close(); + network.close(); + } + + @Test + void testClusterFormation() throws ExecutionException, RetryException { + waitForNodesCount(2); + } + + @Test + void testAddingNodeToExistingCluster() throws ExecutionException, RetryException { + + final String hostnameNodeC = "graylog-datanode-host-" + RandomStringUtils.random(8, "0123456789abcdef"); + final FilesystemKeystoreInformation transportNodeC = DatanodeSecurityTestUtils.generateTransportCert(tempDir, ca, hostnameNodeC); + final FilesystemKeystoreInformation httpNodeC = DatanodeSecurityTestUtils.generateHttpCert(tempDir, ca, hostnameNodeC); + + nodeC = createDatanodeContainer( + network, mongoDBTestService, + hostnameNodeC, + transportNodeC, + httpNodeC + ); + + nodeC.start(); + waitForNodesCount(3); + nodeC.stop(); + waitForNodesCount(2); + } + + @Test + void testRemovingNodeReallocatesShards() throws ExecutionException, RetryException { + + final String hostnameNodeC = "graylog-datanode-host-" + RandomStringUtils.random(8, "0123456789abcdef"); + final FilesystemKeystoreInformation transportNodeC = DatanodeSecurityTestUtils.generateTransportCert(tempDir, ca, hostnameNodeC); + final FilesystemKeystoreInformation httpNodeC = DatanodeSecurityTestUtils.generateHttpCert(tempDir, ca, hostnameNodeC); + + nodeC = createDatanodeContainer( + network, mongoDBTestService, + hostnameNodeC, + transportNodeC, + httpNodeC + ); + + nodeC.start(); + waitForNodesCount(3); + + OpensearchTestIndexCreation osIndexClient = new OpensearchTestIndexCreation(RestOperationParameters.builder() + .port(nodeA.getOpensearchRestPort()) + .truststore(trustStore) + .jwtTokenProvider(DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER) + .build()); + + // create index and get primary and replica shard node + osIndexClient.createIndex(); + List shardNodes = osIndexClient.getShardNodes(); + Assertions.assertEquals(shardNodes.size(), 2); + + List nodes = List.of(nodeA, nodeB, nodeC); + final Optional primary = nodes.stream().filter(n -> shardNodes.get(0).equals(n.getNodeName())).findFirst(); + assertTrue(primary.isPresent()); + final Optional replica = nodes.stream().filter(n -> shardNodes.get(1).equals(n.getNodeName())).findFirst(); + assertTrue(replica.isPresent()); + + // remove node for primary shard, waiting for it to be in AVAILABLE state first + final RestOperationParameters datanodeRestParameters = RestOperationParameters.builder() + .port(primary.get().getDatanodeRestPort()) + .truststore(trustStore) + 
.jwtTokenProvider(DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER) + .build(); + new DatanodeRestApiWait(datanodeRestParameters) + .waitForAvailableStatus(); + new DatanodeStatusChangeOperation(datanodeRestParameters) + .triggerNodeRemoval(); + + + // check that primary shard node is gone and there are still a primary and a secondary + waitForNodesCount(replica.get(), 2); + osIndexClient = new OpensearchTestIndexCreation(RestOperationParameters.builder() + .port(replica.get().getOpensearchRestPort()) + .truststore(trustStore) + .jwtTokenProvider(DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER) + .build()); + List newShardNodes = osIndexClient.getShardNodes(); + Assertions.assertEquals(newShardNodes.size(), 2); + Assertions.assertFalse(newShardNodes.contains(primary.get().getNodeName())); + } + + @NotNull + private DatanodeContainerizedBackend createDatanodeContainer(Network network, + MongoDBTestService mongodb, + String hostname, + FilesystemKeystoreInformation transportKeystore, + FilesystemKeystoreInformation httpKeystore) { + return new DatanodeContainerizedBackend( + network, + mongodb, + hostname, + datanodeContainer -> { + datanodeContainer.withNetwork(network); + datanodeContainer.withEnv("GRAYLOG_DATANODE_PASSWORD_SECRET", DatanodeContainerizedBackend.SIGNING_SECRET); + datanodeContainer.withEnv("GRAYLOG_DATANODE_INITIAL_CLUSTER_MANAGER_NODES", hostnameNodeA); + datanodeContainer.withEnv("GRAYLOG_DATANODE_OPENSEARCH_DISCOVERY_SEED_HOSTS", hostnameNodeA + ":9300"); + + datanodeContainer.withFileSystemBind(transportKeystore.location().toAbsolutePath().toString(), IMAGE_WORKING_DIR + "/config/datanode-transport-certificates.p12"); + datanodeContainer.withFileSystemBind(httpKeystore.location().toAbsolutePath().toString(), IMAGE_WORKING_DIR + "/config/datanode-https-certificates.p12"); + + // configure transport security + datanodeContainer.withEnv("GRAYLOG_DATANODE_TRANSPORT_CERTIFICATE", "datanode-transport-certificates.p12"); + datanodeContainer.withEnv("GRAYLOG_DATANODE_TRANSPORT_CERTIFICATE_PASSWORD", new String(transportKeystore.password())); + datanodeContainer.withEnv("GRAYLOG_DATANODE_INSECURE_STARTUP", "false"); + + // configure http security + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_CERTIFICATE", "datanode-https-certificates.p12"); + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_CERTIFICATE_PASSWORD", new String(httpKeystore.password())); + + // this is the interface that we bind opensearch to. It must be 0.0.0.0 if we want + // to be able to reach opensearch from outside the container and docker network (true?) + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_BIND_ADDRESS", "0.0.0.0"); + + // HOSTNAME is used to generate the SSL certificates and to communicate inside the + // container and docker network, where we do the hostname validation. 
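+ // Setting the docker container name to the same hostname makes the name resolvable on the
+ // shared network and keeps it consistent with the SANs of the generated certificates.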
+ datanodeContainer.withCreateContainerCmdModifier(createContainerCmd -> createContainerCmd.withName(hostname)); + datanodeContainer.withEnv("GRAYLOG_DATANODE_HOSTNAME", hostname); + datanodeContainer.withEnv("GRAYLOG_DATANODE_MONGODB_URI", mongodb.internalUri()); + }); + } + + + private void waitForNodesCount(final int countOfNodes) throws ExecutionException, RetryException { + waitForNodesCount(nodeA, countOfNodes); + } + + private void waitForNodesCount(DatanodeContainerizedBackend node, final int countOfNodes) throws ExecutionException, RetryException { + try { + new DatanodeOpensearchWait(RestOperationParameters.builder() + .port(node.getOpensearchRestPort()) + .truststore(trustStore) + .jwtTokenProvider(DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER) + .build()) + .waitForNodesCount(countOfNodes); + + } catch (Exception retryException) { + LOG.error("DataNode Container logs from node A follow:\n" + nodeA.getLogs()); + LOG.error("DataNode Container logs from node B follow:\n" + nodeB.getLogs()); + if (nodeC != null) { + LOG.error("DataNode Container logs from node C follow:\n" + nodeC.getLogs()); + } + throw retryException; + } + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DatanodeLifecycleIT.java b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeLifecycleIT.java new file mode 100644 index 000000000000..cec76d703cce --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeLifecycleIT.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.integration; + +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.datanode.testinfra.DatanodeContainerizedBackend; +import org.graylog.security.certutil.csr.FilesystemKeystoreInformation; +import org.graylog.testing.restoperations.DatanodeRestApiWait; +import org.graylog.testing.restoperations.DatanodeStatusChangeOperation; +import org.graylog.testing.restoperations.RestOperationParameters; +import org.graylog2.plugin.Tools; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; + +import static org.graylog.datanode.testinfra.DatanodeContainerizedBackend.IMAGE_WORKING_DIR; + +public class DatanodeLifecycleIT { + private static final Logger LOG = LoggerFactory.getLogger(DatanodeLifecycleIT.class); + + @TempDir + static Path tempDir; + private DatanodeContainerizedBackend backend; + private KeyStore trustStore; + private String containerHostname; + + + @BeforeEach + void setUp() throws IOException, GeneralSecurityException { + containerHostname = "graylog-datanode-host-" + RandomStringUtils.random(8, "0123456789abcdef"); + // first generate a self-signed CA + FilesystemKeystoreInformation ca = DatanodeSecurityTestUtils.generateCa(tempDir); + trustStore = DatanodeSecurityTestUtils.buildTruststore(ca); + + // use the CA to generate transport certificate keystore + final FilesystemKeystoreInformation transportCert = DatanodeSecurityTestUtils.generateTransportCert(tempDir, ca, containerHostname); + // use the CA to generate HTTP certificate keystore + final FilesystemKeystoreInformation httpCert = DatanodeSecurityTestUtils.generateHttpCert(tempDir, ca, containerHostname, Tools.getLocalCanonicalHostname()); + + backend = new DatanodeContainerizedBackend(containerHostname, datanodeContainer -> { + // provide the keystore files to the docker container + datanodeContainer.withFileSystemBind(transportCert.location().toAbsolutePath().toString(), IMAGE_WORKING_DIR + "/config/datanode-transport-certificates.p12"); + datanodeContainer.withFileSystemBind(httpCert.location().toAbsolutePath().toString(), IMAGE_WORKING_DIR + "/config/datanode-https-certificates.p12"); + + // configure transport security + datanodeContainer.withEnv("GRAYLOG_DATANODE_TRANSPORT_CERTIFICATE", "datanode-transport-certificates.p12"); + datanodeContainer.withEnv("GRAYLOG_DATANODE_TRANSPORT_CERTIFICATE_PASSWORD", new String(transportCert.password())); + datanodeContainer.withEnv("GRAYLOG_DATANODE_INSECURE_STARTUP", "false"); + + // configure http security + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_CERTIFICATE", "datanode-https-certificates.p12"); + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_CERTIFICATE_PASSWORD", new String(httpCert.password())); + + // this is the interface that we bind opensearch to. It must be 0.0.0.0 if we want + // to be able to reach opensearch from outside the container and docker network (true?) + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_BIND_ADDRESS", "0.0.0.0"); + + // HOSTNAME is used to generate the SSL certificates and to communicate inside the + // container and docker network, where we do the hostname validation. 
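+ // Besides the container hostname, the HTTP certificate also carries the test host's canonical
+ // hostname as a SAN (passed to generateHttpCert above), so TLS connections from outside the
+ // container can be validated against it as well.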
+ datanodeContainer.withCreateContainerCmdModifier(createContainerCmd -> createContainerCmd.withName(containerHostname)); + datanodeContainer.withEnv("GRAYLOG_DATANODE_HOSTNAME", containerHostname); + + datanodeContainer.withEnv("GRAYLOG_DATANODE_SINGLE_NODE_ONLY", "true"); + }).start(); + } + + @AfterEach + void tearDown() { + backend.stop(); + } + + @Test + void testRestartByEventBus() { + final RestOperationParameters restParameters = RestOperationParameters.builder() + .port(backend.getDatanodeRestPort()) + .truststore(trustStore) + .jwtTokenProvider(DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER) + .build(); + final DatanodeRestApiWait waitApi = new DatanodeRestApiWait(restParameters); + final DatanodeStatusChangeOperation statusApi = new DatanodeStatusChangeOperation(restParameters); + + try { + waitApi.waitForAvailableStatus(); + statusApi.triggerNodeStop(); + waitApi.waitForStoppedStatus(); + statusApi.triggerNodeStart(); + waitApi.waitForAvailableStatus(); + } catch (Exception e) { + LOG.warn(backend.getLogs()); + } + + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSearchableSnapshotsIT.java b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSearchableSnapshotsIT.java new file mode 100644 index 000000000000..30b2ea68b0f2 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSearchableSnapshotsIT.java @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.integration; + +import com.github.rholder.retry.RetryException; +import io.restassured.http.ContentType; +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.restoperations.DatanodeOpensearchWait; +import org.graylog.testing.restoperations.RestOperationParameters; +import org.graylog.datanode.testinfra.DatanodeContainerizedBackend; +import org.graylog.testing.completebackend.S3MinioContainer; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.mongodb.MongoDBTestService; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.Network; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.CreateBucketRequest; + +import java.util.concurrent.ExecutionException; + +import static io.restassured.RestAssured.given; + +public class DatanodeSearchableSnapshotsIT { + + + private Network network; + private MongoDBTestService mongoDB; + private S3MinioContainer s3Container; + private DatanodeContainerizedBackend datanode; + + @BeforeEach + void setUp() { + network = Network.newNetwork(); + mongoDB = MongoDBTestService.create(MongodbServer.DEFAULT_VERSION, network); + mongoDB.start(); + s3Container = new S3MinioContainer(network); + s3Container.start(); + + datanode = new DatanodeContainerizedBackend(network, mongoDB, "datanode", datanodeContainer -> { + datanodeContainer.withEnv("GRAYLOG_DATANODE_S3_CLIENT_DEFAULT_ACCESS_KEY", s3Container.getAccessKey()); + datanodeContainer.withEnv("GRAYLOG_DATANODE_S3_CLIENT_DEFAULT_SECRET_KEY", s3Container.getSecretKey()); + datanodeContainer.withEnv("GRAYLOG_DATANODE_S3_CLIENT_DEFAULT_ENDPOINT", s3Container.getInternalURI().toString()); + }); + + try { + datanode.start(); + } catch (Exception e) { + System.out.println(datanode.getLogs()); + } + } + + @AfterEach + void tearDown() { + datanode.stop(); + s3Container.stop(); + mongoDB.close(); + network.close(); + } + + @Test + void testSnapshotCreation() throws ExecutionException, RetryException { + + final var opensearchRestPort = datanode.getOpensearchRestPort(); + final var baseUrl = "http://localhost:" + opensearchRestPort; + + // give opensearch time to start everything + waitForNode(opensearchRestPort); + + // verify that the s3-repository plugin is installed + verifyInstalledPlugin(baseUrl, "repository-s3"); + + final String bucketName = "my-s3-bucket"; + final String repositoryName = "my-s3-repository"; + createS3Bucket(bucketName); + createRepository(baseUrl, repositoryName, bucketName); + + createSnapshot(baseUrl, repositoryName) + .body("snapshot.state", Matchers.equalTo("SUCCESS")); + } + + private static ValidatableResponse createRepository(String baseUrl, String repositoryName, String bucketName) { + final String req = """ + { + "type": "s3", + "settings": { + "bucket": "%s", + "base_path": "my/snapshot/directory" + } + } + """.formatted(bucketName); + + return given() + .body(req) + .contentType(ContentType.JSON) + .put(baseUrl + "/_snapshot/" + repositoryName) + .then() + .statusCode(200) + .body("acknowledged", Matchers.equalTo(true)); + } + + private static ValidatableResponse createSnapshot(String baseUrl, String repositoryName) { + return given().put(baseUrl + "/_snapshot/" + repositoryName + "/1?wait_for_completion=true").then().statusCode(200); + } + + private void createS3Bucket(String bucketName) { + try (final S3Client 
client = s3Container.getClient()) { + client.createBucket(CreateBucketRequest.builder().bucket(bucketName).build()); + } + } + + private static void verifyInstalledPlugin(String baseUrl, String pluginName) { + given().get(baseUrl + "/_cat/plugins").then().statusCode(200).body(Matchers.containsString(pluginName)); + } + + private void waitForNode(Integer opensearchRestPort) throws ExecutionException, RetryException { + // this instance is not using any security, no truststore or jwt tokens needed + new DatanodeOpensearchWait(RestOperationParameters.builder().port(opensearchRestPort).build()).waitForNodesCount(1); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSecuritySetupIT.java b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSecuritySetupIT.java new file mode 100644 index 000000000000..d0f0ff8a914a --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSecuritySetupIT.java @@ -0,0 +1,129 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.integration; + +import com.github.rholder.retry.RetryException; +import io.restassured.response.ValidatableResponse; +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.datanode.testinfra.DatanodeContainerizedBackend; +import org.graylog.security.certutil.csr.FilesystemKeystoreInformation; +import org.graylog.testing.restoperations.DatanodeRestApiWait; +import org.graylog.testing.restoperations.RestOperationParameters; +import org.graylog2.plugin.Tools; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.util.concurrent.ExecutionException; + +import static io.restassured.RestAssured.given; +import static org.graylog.datanode.testinfra.DatanodeContainerizedBackend.IMAGE_WORKING_DIR; + +public class DatanodeSecuritySetupIT { + private static final Logger LOG = LoggerFactory.getLogger(DatanodeSecuritySetupIT.class); + + @TempDir + static Path tempDir; + private DatanodeContainerizedBackend backend; + private KeyStore trustStore; + private String containerHostname; + + @BeforeEach + void setUp() throws IOException, GeneralSecurityException { + containerHostname = "graylog-datanode-host-" + RandomStringUtils.random(8, "0123456789abcdef"); + // first generate a self-signed CA + FilesystemKeystoreInformation ca = DatanodeSecurityTestUtils.generateCa(tempDir); + trustStore = DatanodeSecurityTestUtils.buildTruststore(ca); + + // use the CA to generate transport certificate keystore + final FilesystemKeystoreInformation transportCert = DatanodeSecurityTestUtils.generateTransportCert(tempDir, ca, containerHostname); + // use the CA to generate 
HTTP certificate keystore + final FilesystemKeystoreInformation httpCert = DatanodeSecurityTestUtils.generateHttpCert(tempDir, ca, containerHostname, Tools.getLocalCanonicalHostname()); + + backend = new DatanodeContainerizedBackend(containerHostname, datanodeContainer -> { + // provide the keystore files to the docker container + datanodeContainer.withFileSystemBind(transportCert.location().toAbsolutePath().toString(), IMAGE_WORKING_DIR + "/config/datanode-transport-certificates.p12"); + datanodeContainer.withFileSystemBind(httpCert.location().toAbsolutePath().toString(), IMAGE_WORKING_DIR + "/config/datanode-https-certificates.p12"); + + // configure transport security + datanodeContainer.withEnv("GRAYLOG_DATANODE_TRANSPORT_CERTIFICATE", "datanode-transport-certificates.p12"); + datanodeContainer.withEnv("GRAYLOG_DATANODE_TRANSPORT_CERTIFICATE_PASSWORD", new String(transportCert.password())); + datanodeContainer.withEnv("GRAYLOG_DATANODE_INSECURE_STARTUP", "false"); + + // configure http security + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_CERTIFICATE", "datanode-https-certificates.p12"); + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_CERTIFICATE_PASSWORD", new String(httpCert.password())); + + // this is the interface that we bind opensearch to. It must be 0.0.0.0 if we want + // to be able to reach opensearch from outside the container and docker network (true?) + datanodeContainer.withEnv("GRAYLOG_DATANODE_HTTP_BIND_ADDRESS", "0.0.0.0"); + + // HOSTNAME is used to generate the SSL certificates and to communicate inside the + // container and docker network, where we do the hostname validation. + datanodeContainer.withCreateContainerCmdModifier(createContainerCmd -> createContainerCmd.withName(containerHostname)); + datanodeContainer.withEnv("GRAYLOG_DATANODE_HOSTNAME", containerHostname); + + datanodeContainer.withEnv("GRAYLOG_DATANODE_SINGLE_NODE_ONLY", "true"); + }).start(); + } + + @AfterEach + void tearDown() { + backend.stop(); + } + + @Test + void testSecuredSetup() throws ExecutionException, RetryException { + waitForOpensearchAvailableStatus(backend.getDatanodeRestPort(), trustStore); + + try { + given().header("Authorization", DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER.get()) + .trustStore(trustStore) + .get("https://localhost:" + backend.getOpensearchRestPort()) + .then().assertThat() + .body("name", Matchers.equalTo(containerHostname)) + .body("cluster_name", Matchers.equalTo("datanode-cluster")); + } catch (Exception ex) { + LOG.error("Error connecting to OpenSearch in the DataNode, showing logs:\n{}", backend.getLogs()); + throw ex; + } + } + + private ValidatableResponse waitForOpensearchAvailableStatus(final Integer datanodeRestPort, final KeyStore trustStore) throws ExecutionException, RetryException { + + try { + return new DatanodeRestApiWait( + RestOperationParameters.builder() + .port(datanodeRestPort) + .truststore(trustStore) + .jwtTokenProvider(DatanodeContainerizedBackend.JWT_AUTH_TOKEN_PROVIDER) + .build()) + .waitForAvailableStatus(); + } catch (Exception ex) { + LOG.error("Error starting the DataNode, showing logs:\n" + backend.getLogs()); + throw ex; + } + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSecurityTestUtils.java b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSecurityTestUtils.java new file mode 100644 index 000000000000..997cde2a7c32 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeSecurityTestUtils.java @@ -0,0 +1,82 @@ +/* + * 
Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.integration; + +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.security.certutil.CertutilCa; +import org.graylog.security.certutil.CertutilCert; +import org.graylog.security.certutil.CertutilHttp; +import org.graylog.security.certutil.console.TestableConsole; +import org.graylog.security.certutil.csr.FilesystemKeystoreInformation; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.graylog2.security.TruststoreCreator; + +import java.io.IOException; +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.util.Enumeration; + +public class DatanodeSecurityTestUtils { + public static KeyStore buildTruststore(KeystoreInformation ca) throws IOException, GeneralSecurityException { + final TruststoreCreator truststoreCreator = TruststoreCreator.newEmpty(); + truststoreCreator.addCertificates(ca); + return truststoreCreator.getTruststore(); + } + + public static FilesystemKeystoreInformation generateCa(Path dir) { + final Path certPath = dir.resolve("test-ca.p12"); + final String password = RandomStringUtils.randomAlphabetic(10); + final TestableConsole input = TestableConsole.empty().silent() + .register(CertutilCa.PROMPT_ENTER_CA_PASSWORD, password); + final CertutilCa command = new CertutilCa(certPath.toAbsolutePath().toString(), input); + command.run(); + return new FilesystemKeystoreInformation(certPath, password.toCharArray()); + } + + public static FilesystemKeystoreInformation generateTransportCert(Path dir, FilesystemKeystoreInformation ca, String... containerHostnames) { + final Path transportPath = dir.resolve("transport-" + RandomStringUtils.randomAlphabetic(10) + ".p12"); + final String password = RandomStringUtils.randomAlphabetic(10); + TestableConsole inputCert = TestableConsole.empty().silent() + .register(CertutilCert.PROMPT_ENTER_CA_PASSWORD, new String(ca.password())) + .register(CertutilCert.PROMPT_ENTER_CERTIFICATE_PASSWORD, password) + .register(CertutilCert.PROMPT_ENTER_CERT_ALTERNATIVE_NAMES, String.join(",", containerHostnames)); + CertutilCert certutilCert = new CertutilCert( + ca.location().toAbsolutePath().toString(), + transportPath.toAbsolutePath().toString(), + inputCert); + certutilCert.run(); + return new FilesystemKeystoreInformation(transportPath, password.toCharArray()); + } + + public static FilesystemKeystoreInformation generateHttpCert(Path dir, FilesystemKeystoreInformation ca, String... 
containerHostnames) { + final Path httpPath = dir.resolve("http-" + RandomStringUtils.randomAlphabetic(10) + ".p12"); + final String password = RandomStringUtils.randomAlphabetic(10); + TestableConsole inputHttp = TestableConsole.empty().silent() + .register(CertutilHttp.PROMPT_USE_OWN_CERTIFICATE_AUTHORITY, "n") + .register(CertutilHttp.PROMPT_ENTER_CA_PASSWORD, new String(ca.password())) + .register(CertutilHttp.PROMPT_ENTER_CERTIFICATE_VALIDITY_IN_DAYS, "90") + .register(CertutilHttp.PROMPT_ENTER_CERTIFICATE_ALTERNATIVE_NAMES, String.join(",", containerHostnames)) + .register(CertutilHttp.PROMPT_ENTER_HTTP_CERTIFICATE_PASSWORD, password); + CertutilHttp certutilCert = new CertutilHttp( + ca.location().toAbsolutePath().toString(), + httpPath.toAbsolutePath().toString(), + inputHttp); + certutilCert.run(); + return new FilesystemKeystoreInformation(httpPath, password.toCharArray()); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/integration/DatanodeStartupIT.java b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeStartupIT.java new file mode 100644 index 000000000000..1eeccd5fadf7 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/integration/DatanodeStartupIT.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.integration; + +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.Retryer; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import io.restassured.RestAssured; +import io.restassured.response.ValidatableResponse; +import org.apache.http.NoHttpResponseException; +import org.graylog.datanode.testinfra.DatanodeContainerizedBackend; +import org.graylog.datanode.testinfra.DatanodeTestExtension; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.SocketException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +@ExtendWith(DatanodeTestExtension.class) +public class DatanodeStartupIT { + private static final Logger LOG = LoggerFactory.getLogger(DatanodeStartupIT.class); + + private final DatanodeContainerizedBackend backend; + + public DatanodeStartupIT(DatanodeContainerizedBackend backend) { + this.backend = backend; + } + + @Test + void testDatanodeStartup() throws ExecutionException, RetryException { + + try { + final Retryer retryer = RetryerBuilder.newBuilder() + .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS)) + .withStopStrategy(StopStrategies.stopAfterAttempt(120)) + .retryIfException(input -> input instanceof NoHttpResponseException) + .retryIfException(input -> input instanceof SocketException) + .retryIfResult(input -> !input.extract().body().path("opensearch.node.state").equals("AVAILABLE")) + .build(); + + final Integer datanodeRestApiPort = backend.getDatanodeRestPort(); + retryer.call(() -> this.getStatus(datanodeRestApiPort)) + .assertThat() + .body("opensearch.node.node_name", Matchers.equalTo("node1")) + .body("opensearch.node.process.pid", Matchers.notNullValue()); + + assertInsecuredMethodSecurity(datanodeRestApiPort); + } catch (RetryException retryException) { + LOG.error("DataNode Container logs follow:\n" + backend.getLogs()); + throw retryException; + } + } + + private void assertInsecuredMethodSecurity(Integer mappedPort) { + RestAssured.given() + .delete("http://localhost:" + mappedPort + "/management") + .then() + .statusCode(401); + } + + private ValidatableResponse getStatus(Integer mappedPort) { + return RestAssured.given() + .get("http://localhost:" + mappedPort) + .then(); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/metrics/ClusterStatMetricsCollectorTest.java b/data-node/src/test/java/org/graylog/datanode/metrics/ClusterStatMetricsCollectorTest.java new file mode 100644 index 000000000000..90f50fe84577 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/metrics/ClusterStatMetricsCollectorTest.java @@ -0,0 +1,772 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.metrics; + +import org.graylog.shaded.opensearch2.org.apache.http.HttpEntity; +import org.graylog.shaded.opensearch2.org.opensearch.client.Response; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestClient; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog2.shared.bindings.providers.ObjectMapperProvider; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Arrays; +import java.util.Map; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class ClusterStatMetricsCollectorTest { + + ClusterStatMetricsCollector collector; + @Mock + RestHighLevelClient client; + + @BeforeEach + public void setUp() throws IOException { + Response response = mock(Response.class); + HttpEntity entity = mock(HttpEntity.class); + when(response.getEntity()).thenReturn(entity); + when(entity.getContent()).thenReturn(new ByteArrayInputStream(clusterStatResponse.getBytes(Charset.defaultCharset()))); + RestClient lowLevelClient = mock(RestClient.class); + when(client.getLowLevelClient()).thenReturn(lowLevelClient); + when(lowLevelClient.performRequest(any())).thenReturn(response); + this.collector = new ClusterStatMetricsCollector(client, new ObjectMapperProvider().get()); + } + + @Test + public void getClusterMetrics() { + final Map previousMetrics = Map.of("search_ops", 5); + Map clusterMetrics = collector.getClusterMetrics(previousMetrics); + assertThat(clusterMetrics.get("doc_count")).isEqualTo(6206956); + assertThat(clusterMetrics.get("search_ops")).isEqualTo(13); + assertThat(clusterMetrics.get("search_ops_rate")).isEqualTo(8L); + String[] allMetrics = Arrays.stream(ClusterStatMetrics.values()).map(ClusterStatMetrics::getFieldName).toArray(String[]::new); + assertThat(clusterMetrics).containsKeys(allMetrics); + } + + private final static String clusterStatResponse = """ + { + "_shards": { + "total": 38, + "successful": 38, + "failed": 0 + }, + "_all": { + "primaries": { + "docs": { + "count": 6206956, + "deleted": 3 + }, + "store": { + "size_in_bytes": 1781904103, + "reserved_in_bytes": 0 + }, + "indexing": { + "index_total": 6188311, + "index_time_in_millis": 505860, + "index_current": 0, + "index_failed": 10, + "delete_total": 0, + "delete_time_in_millis": 0, + "delete_current": 0, + "noop_update_total": 0, + "is_throttled": false, + "throttle_time_in_millis": 0, + "doc_status": {} + }, + "get": { + "total": 175, + "time_in_millis": 103, + "exists_total": 173, + "exists_time_in_millis": 103, + "missing_total": 2, + "missing_time_in_millis": 0, + "current": 0 + }, + "search": { + "open_contexts": 0, + "query_total": 11, + "query_time_in_millis": 40, + "query_current": 0, + "fetch_total": 11, + "fetch_time_in_millis": 0, + "fetch_current": 0, + "scroll_total": 0, + "scroll_time_in_millis": 0, + "scroll_current": 0, + "point_in_time_total": 0, + "point_in_time_time_in_millis": 0, + "point_in_time_current": 0, + "suggest_total": 0, + "suggest_time_in_millis": 0, + "suggest_current": 0 + }, + "merges": { + "current": 0, + "current_docs": 0, + 
"current_size_in_bytes": 0, + "total": 831, + "total_time_in_millis": 246474, + "total_docs": 20378299, + "total_size_in_bytes": 5110146545, + "total_stopped_time_in_millis": 0, + "total_throttled_time_in_millis": 39458, + "total_auto_throttle_in_bytes": 679234600, + "unreferenced_file_cleanups_performed": 0 + }, + "refresh": { + "total": 52698, + "total_time_in_millis": 3326040, + "external_total": 52625, + "external_total_time_in_millis": 3346017, + "listeners": 0 + }, + "flush": { + "total": 68, + "periodic": 66, + "total_time_in_millis": 30243 + }, + "warmer": { + "current": 0, + "total": 7729, + "total_time_in_millis": 165 + }, + "query_cache": { + "memory_size_in_bytes": 0, + "total_count": 0, + "hit_count": 0, + "miss_count": 0, + "cache_size": 0, + "cache_count": 0, + "evictions": 0 + }, + "fielddata": { + "memory_size_in_bytes": 0, + "evictions": 0 + }, + "completion": { + "size_in_bytes": 0 + }, + "segments": { + "count": 93, + "memory_in_bytes": 0, + "terms_memory_in_bytes": 0, + "stored_fields_memory_in_bytes": 0, + "term_vectors_memory_in_bytes": 0, + "norms_memory_in_bytes": 0, + "points_memory_in_bytes": 0, + "doc_values_memory_in_bytes": 0, + "index_writer_memory_in_bytes": 1692740, + "version_map_memory_in_bytes": 144840, + "fixed_bit_set_memory_in_bytes": 0, + "max_unsafe_auto_id_timestamp": -1, + "remote_store": { + "upload": { + "total_upload_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "refresh_size_lag": { + "total_bytes": 0, + "max_bytes": 0 + }, + "max_refresh_time_lag_in_millis": 0, + "total_time_spent_in_millis": 0 + }, + "download": { + "total_download_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "total_time_spent_in_millis": 0 + } + }, + "segment_replication": { + "max_bytes_behind": "0b", + "total_bytes_behind": "0b", + "max_replication_lag": "0s" + }, + "file_sizes": {} + }, + "translog": { + "operations": 1006222, + "size_in_bytes": 833969509, + "uncommitted_operations": 1006222, + "uncommitted_size_in_bytes": 833969509, + "earliest_last_modified_age": 24209, + "remote_store": { + "upload": { + "total_uploads": { + "started": 0, + "failed": 0, + "succeeded": 0 + }, + "total_upload_size": { + "started_bytes": 0, + "failed_bytes": 0, + "succeeded_bytes": 0 + } + } + } + }, + "request_cache": { + "memory_size_in_bytes": 0, + "evictions": 0, + "hit_count": 2, + "miss_count": 9 + }, + "recovery": { + "current_as_source": 0, + "current_as_target": 0, + "throttle_time_in_millis": 0 + } + }, + "total": { + "docs": { + "count": 6206974, + "deleted": 3 + }, + "store": { + "size_in_bytes": 1782014741, + "reserved_in_bytes": 0 + }, + "indexing": { + "index_total": 6189056, + "index_time_in_millis": 507364, + "index_current": 0, + "index_failed": 10, + "delete_total": 0, + "delete_time_in_millis": 0, + "delete_current": 0, + "noop_update_total": 0, + "is_throttled": false, + "throttle_time_in_millis": 0, + "doc_status": {} + }, + "get": { + "total": 206, + "time_in_millis": 205, + "exists_total": 204, + "exists_time_in_millis": 205, + "missing_total": 2, + "missing_time_in_millis": 0, + "current": 0 + }, + "search": { + "open_contexts": 0, + "query_total": 13, + "query_time_in_millis": 96, + "query_current": 0, + "fetch_total": 13, + "fetch_time_in_millis": 0, + "fetch_current": 0, + "scroll_total": 0, + "scroll_time_in_millis": 0, + "scroll_current": 0, + "point_in_time_total": 0, + "point_in_time_time_in_millis": 0, + "point_in_time_current": 0, + "suggest_total": 0, + "suggest_time_in_millis": 0, 
+ "suggest_current": 0 + }, + "merges": { + "current": 0, + "current_docs": 0, + "current_size_in_bytes": 0, + "total": 852, + "total_time_in_millis": 249527, + "total_docs": 20378391, + "total_size_in_bytes": 5110398375, + "total_stopped_time_in_millis": 0, + "total_throttled_time_in_millis": 39458, + "total_auto_throttle_in_bytes": 784092200, + "unreferenced_file_cleanups_performed": 0 + }, + "refresh": { + "total": 52841, + "total_time_in_millis": 3333600, + "external_total": 52708, + "external_total_time_in_millis": 3353472, + "listeners": 0 + }, + "flush": { + "total": 105, + "periodic": 100, + "total_time_in_millis": 45000 + }, + "warmer": { + "current": 0, + "total": 7807, + "total_time_in_millis": 167 + }, + "query_cache": { + "memory_size_in_bytes": 0, + "total_count": 0, + "hit_count": 0, + "miss_count": 0, + "cache_size": 0, + "cache_count": 0, + "evictions": 0 + }, + "fielddata": { + "memory_size_in_bytes": 0, + "evictions": 0 + }, + "completion": { + "size_in_bytes": 0 + }, + "segments": { + "count": 99, + "memory_in_bytes": 0, + "terms_memory_in_bytes": 0, + "stored_fields_memory_in_bytes": 0, + "term_vectors_memory_in_bytes": 0, + "norms_memory_in_bytes": 0, + "points_memory_in_bytes": 0, + "doc_values_memory_in_bytes": 0, + "index_writer_memory_in_bytes": 2168432, + "version_map_memory_in_bytes": 145033, + "fixed_bit_set_memory_in_bytes": 0, + "max_unsafe_auto_id_timestamp": -1, + "remote_store": { + "upload": { + "total_upload_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "refresh_size_lag": { + "total_bytes": 0, + "max_bytes": 0 + }, + "max_refresh_time_lag_in_millis": 0, + "total_time_spent_in_millis": 0 + }, + "download": { + "total_download_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "total_time_spent_in_millis": 0 + } + }, + "segment_replication": { + "max_bytes_behind": "0b", + "total_bytes_behind": "0b", + "max_replication_lag": "0s" + }, + "file_sizes": {} + }, + "translog": { + "operations": 1006848, + "size_in_bytes": 834131527, + "uncommitted_operations": 1006848, + "uncommitted_size_in_bytes": 834131527, + "earliest_last_modified_age": 24209, + "remote_store": { + "upload": { + "total_uploads": { + "started": 0, + "failed": 0, + "succeeded": 0 + }, + "total_upload_size": { + "started_bytes": 0, + "failed_bytes": 0, + "succeeded_bytes": 0 + } + } + } + }, + "request_cache": { + "memory_size_in_bytes": 0, + "evictions": 0, + "hit_count": 2, + "miss_count": 11 + }, + "recovery": { + "current_as_source": 0, + "current_as_target": 0, + "throttle_time_in_millis": 0 + } + } + }, + "indices": { + ".opensearch-observability": { + "uuid": "ATc0YY96SayiN3HoXLo09Q", + "primaries": { + "docs": { + "count": 0, + "deleted": 0 + }, + "store": { + "size_in_bytes": 208, + "reserved_in_bytes": 0 + }, + "indexing": { + "index_total": 0, + "index_time_in_millis": 0, + "index_current": 0, + "index_failed": 0, + "delete_total": 0, + "delete_time_in_millis": 0, + "delete_current": 0, + "noop_update_total": 0, + "is_throttled": false, + "throttle_time_in_millis": 0, + "doc_status": {} + }, + "get": { + "total": 0, + "time_in_millis": 0, + "exists_total": 0, + "exists_time_in_millis": 0, + "missing_total": 0, + "missing_time_in_millis": 0, + "current": 0 + }, + "search": { + "open_contexts": 0, + "query_total": 0, + "query_time_in_millis": 0, + "query_current": 0, + "fetch_total": 0, + "fetch_time_in_millis": 0, + "fetch_current": 0, + "scroll_total": 0, + "scroll_time_in_millis": 0, + "scroll_current": 0, + 
"point_in_time_total": 0, + "point_in_time_time_in_millis": 0, + "point_in_time_current": 0, + "suggest_total": 0, + "suggest_time_in_millis": 0, + "suggest_current": 0 + }, + "merges": { + "current": 0, + "current_docs": 0, + "current_size_in_bytes": 0, + "total": 0, + "total_time_in_millis": 0, + "total_docs": 0, + "total_size_in_bytes": 0, + "total_stopped_time_in_millis": 0, + "total_throttled_time_in_millis": 0, + "total_auto_throttle_in_bytes": 20971520, + "unreferenced_file_cleanups_performed": 0 + }, + "refresh": { + "total": 5, + "total_time_in_millis": 0, + "external_total": 2, + "external_total_time_in_millis": 1, + "listeners": 0 + }, + "flush": { + "total": 1, + "periodic": 1, + "total_time_in_millis": 0 + }, + "warmer": { + "current": 0, + "total": 1, + "total_time_in_millis": 0 + }, + "query_cache": { + "memory_size_in_bytes": 0, + "total_count": 0, + "hit_count": 0, + "miss_count": 0, + "cache_size": 0, + "cache_count": 0, + "evictions": 0 + }, + "fielddata": { + "memory_size_in_bytes": 0, + "evictions": 0 + }, + "completion": { + "size_in_bytes": 0 + }, + "segments": { + "count": 0, + "memory_in_bytes": 0, + "terms_memory_in_bytes": 0, + "stored_fields_memory_in_bytes": 0, + "term_vectors_memory_in_bytes": 0, + "norms_memory_in_bytes": 0, + "points_memory_in_bytes": 0, + "doc_values_memory_in_bytes": 0, + "index_writer_memory_in_bytes": 0, + "version_map_memory_in_bytes": 0, + "fixed_bit_set_memory_in_bytes": 0, + "max_unsafe_auto_id_timestamp": -1, + "remote_store": { + "upload": { + "total_upload_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "refresh_size_lag": { + "total_bytes": 0, + "max_bytes": 0 + }, + "max_refresh_time_lag_in_millis": 0, + "total_time_spent_in_millis": 0 + }, + "download": { + "total_download_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "total_time_spent_in_millis": 0 + } + }, + "segment_replication": { + "max_bytes_behind": "0b", + "total_bytes_behind": "0b", + "max_replication_lag": "0s" + }, + "file_sizes": {} + }, + "translog": { + "operations": 0, + "size_in_bytes": 55, + "uncommitted_operations": 0, + "uncommitted_size_in_bytes": 55, + "earliest_last_modified_age": 18698230, + "remote_store": { + "upload": { + "total_uploads": { + "started": 0, + "failed": 0, + "succeeded": 0 + }, + "total_upload_size": { + "started_bytes": 0, + "failed_bytes": 0, + "succeeded_bytes": 0 + } + } + } + }, + "request_cache": { + "memory_size_in_bytes": 0, + "evictions": 0, + "hit_count": 0, + "miss_count": 0 + }, + "recovery": { + "current_as_source": 0, + "current_as_target": 0, + "throttle_time_in_millis": 0 + } + }, + "total": { + "docs": { + "count": 0, + "deleted": 0 + }, + "store": { + "size_in_bytes": 416, + "reserved_in_bytes": 0 + }, + "indexing": { + "index_total": 0, + "index_time_in_millis": 0, + "index_current": 0, + "index_failed": 0, + "delete_total": 0, + "delete_time_in_millis": 0, + "delete_current": 0, + "noop_update_total": 0, + "is_throttled": false, + "throttle_time_in_millis": 0, + "doc_status": {} + }, + "get": { + "total": 0, + "time_in_millis": 0, + "exists_total": 0, + "exists_time_in_millis": 0, + "missing_total": 0, + "missing_time_in_millis": 0, + "current": 0 + }, + "search": { + "open_contexts": 0, + "query_total": 0, + "query_time_in_millis": 0, + "query_current": 0, + "fetch_total": 0, + "fetch_time_in_millis": 0, + "fetch_current": 0, + "scroll_total": 0, + "scroll_time_in_millis": 0, + "scroll_current": 0, + "point_in_time_total": 0, + 
"point_in_time_time_in_millis": 0, + "point_in_time_current": 0, + "suggest_total": 0, + "suggest_time_in_millis": 0, + "suggest_current": 0 + }, + "merges": { + "current": 0, + "current_docs": 0, + "current_size_in_bytes": 0, + "total": 0, + "total_time_in_millis": 0, + "total_docs": 0, + "total_size_in_bytes": 0, + "total_stopped_time_in_millis": 0, + "total_throttled_time_in_millis": 0, + "total_auto_throttle_in_bytes": 41943040, + "unreferenced_file_cleanups_performed": 0 + }, + "refresh": { + "total": 8, + "total_time_in_millis": 0, + "external_total": 4, + "external_total_time_in_millis": 1, + "listeners": 0 + }, + "flush": { + "total": 2, + "periodic": 2, + "total_time_in_millis": 0 + }, + "warmer": { + "current": 0, + "total": 2, + "total_time_in_millis": 0 + }, + "query_cache": { + "memory_size_in_bytes": 0, + "total_count": 0, + "hit_count": 0, + "miss_count": 0, + "cache_size": 0, + "cache_count": 0, + "evictions": 0 + }, + "fielddata": { + "memory_size_in_bytes": 0, + "evictions": 0 + }, + "completion": { + "size_in_bytes": 0 + }, + "segments": { + "count": 0, + "memory_in_bytes": 0, + "terms_memory_in_bytes": 0, + "stored_fields_memory_in_bytes": 0, + "term_vectors_memory_in_bytes": 0, + "norms_memory_in_bytes": 0, + "points_memory_in_bytes": 0, + "doc_values_memory_in_bytes": 0, + "index_writer_memory_in_bytes": 0, + "version_map_memory_in_bytes": 0, + "fixed_bit_set_memory_in_bytes": 0, + "max_unsafe_auto_id_timestamp": -1, + "remote_store": { + "upload": { + "total_upload_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "refresh_size_lag": { + "total_bytes": 0, + "max_bytes": 0 + }, + "max_refresh_time_lag_in_millis": 0, + "total_time_spent_in_millis": 0 + }, + "download": { + "total_download_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "total_time_spent_in_millis": 0 + } + }, + "segment_replication": { + "max_bytes_behind": "0b", + "total_bytes_behind": "0b", + "max_replication_lag": "0s" + }, + "file_sizes": {} + }, + "translog": { + "operations": 0, + "size_in_bytes": 110, + "uncommitted_operations": 0, + "uncommitted_size_in_bytes": 110, + "earliest_last_modified_age": 18673677, + "remote_store": { + "upload": { + "total_uploads": { + "started": 0, + "failed": 0, + "succeeded": 0 + }, + "total_upload_size": { + "started_bytes": 0, + "failed_bytes": 0, + "succeeded_bytes": 0 + } + } + } + }, + "request_cache": { + "memory_size_in_bytes": 0, + "evictions": 0, + "hit_count": 0, + "miss_count": 0 + }, + "recovery": { + "current_as_source": 0, + "current_as_target": 0, + "throttle_time_in_millis": 0 + } + } + } + } + } + """; + +} diff --git a/data-node/src/test/java/org/graylog/datanode/metrics/NodeMetricsCollectorTest.java b/data-node/src/test/java/org/graylog/datanode/metrics/NodeMetricsCollectorTest.java new file mode 100644 index 000000000000..77e5338f7df8 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/metrics/NodeMetricsCollectorTest.java @@ -0,0 +1,1077 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.metrics; + +import org.graylog.shaded.opensearch2.org.apache.http.HttpEntity; +import org.graylog.shaded.opensearch2.org.opensearch.client.Response; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestClient; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog2.shared.bindings.providers.ObjectMapperProvider; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Arrays; +import java.util.Map; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class NodeMetricsCollectorTest { + + private final String NODENAME = "datanode1"; + + NodeMetricsCollector collector; + @Mock + RestHighLevelClient client; + + @BeforeEach + public void setUp() throws IOException { + Response response = mock(Response.class); + HttpEntity entity = mock(HttpEntity.class); + when(response.getEntity()).thenReturn(entity); + when(entity.getContent()).thenReturn(new ByteArrayInputStream(nodeStatResponse.getBytes(Charset.defaultCharset()))); + RestClient lowLevelClient = mock(RestClient.class); + when(client.getLowLevelClient()).thenReturn(lowLevelClient); + when(lowLevelClient.performRequest(any())).thenReturn(response); + this.collector = new NodeMetricsCollector(client, new ObjectMapperProvider().get()); + } + + @Test + public void getNodeMetrics() { + Map nodeMetrics = collector.getNodeMetrics(NODENAME); + assertThat(nodeMetrics.get("cpu_load")).isEqualTo(26.4873046875); + assertThat(nodeMetrics.get("disk_free")).isEqualTo(572.1824f); + String[] allMetrics = Arrays.stream(NodeStatMetrics.values()).map(NodeStatMetrics::getFieldName).toArray(String[]::new); + assertThat(nodeMetrics).containsKeys(allMetrics); + } + + + private final static String nodeStatResponse = """ + { + "_nodes": { + "total": 1, + "successful": 1, + "failed": 0 + }, + "cluster_name": "datanode-cluster", + "nodes": { + "qvCZ9yQ5S-OG8IKTnUdhpw": { + "timestamp": 1705681973264, + "name": "datanode1", + "transport_address": "127.0.0.1:9301", + "host": "datanode1", + "ip": "127.0.0.1:9301", + "roles": [ + "cluster_manager", + "data", + "ingest", + "remote_cluster_client" + ], + "attributes": { + "shard_indexing_pressure_enabled": "true" + }, + "indices": { + "docs": { + "count": 60369559, + "deleted": 88 + }, + "store": { + "size_in_bytes": 14300209839, + "reserved_in_bytes": 0 + }, + "indexing": { + "index_total": 244589, + "index_time_in_millis": 24757, + "index_current": 0, + "index_failed": 23, + "delete_total": 59, + "delete_time_in_millis": 279, + "delete_current": 0, + "noop_update_total": 0, + "is_throttled": false, + "throttle_time_in_millis": 0, + "doc_status": { + "2xx": 241582, + "4xx": 50 + } + }, + "get": { + "total": 109, + "time_in_millis": 173, + "exists_total": 91, + "exists_time_in_millis": 173, + "missing_total": 18, + "missing_time_in_millis": 0, + "current": 0 + }, + "search": { + "open_contexts": 0, + "query_total": 176, + "query_time_in_millis": 97, + 
"query_current": 0, + "fetch_total": 174, + "fetch_time_in_millis": 66, + "fetch_current": 0, + "scroll_total": 0, + "scroll_time_in_millis": 0, + "scroll_current": 0, + "point_in_time_total": 0, + "point_in_time_time_in_millis": 0, + "point_in_time_current": 0, + "suggest_total": 0, + "suggest_time_in_millis": 0, + "suggest_current": 0, + "request": { + "dfs_pre_query": { + "time_in_millis": 0, + "current": 0, + "total": 0 + }, + "query": { + "time_in_millis": 0, + "current": 0, + "total": 0 + }, + "fetch": { + "time_in_millis": 0, + "current": 0, + "total": 0 + }, + "dfs_query": { + "time_in_millis": 0, + "current": 0, + "total": 0 + }, + "expand": { + "time_in_millis": 0, + "current": 0, + "total": 0 + }, + "can_match": { + "time_in_millis": 0, + "current": 0, + "total": 0 + } + } + }, + "merges": { + "current": 0, + "current_docs": 0, + "current_size_in_bytes": 0, + "total": 37, + "total_time_in_millis": 8328, + "total_docs": 666460, + "total_size_in_bytes": 168442652, + "total_stopped_time_in_millis": 0, + "total_throttled_time_in_millis": 0, + "total_auto_throttle_in_bytes": 838860800, + "unreferenced_file_cleanups_performed": 0 + }, + "refresh": { + "total": 1947, + "total_time_in_millis": 102298, + "external_total": 1897, + "external_total_time_in_millis": 103024, + "listeners": 0 + }, + "flush": { + "total": 37, + "periodic": 37, + "total_time_in_millis": 8771 + }, + "warmer": { + "current": 0, + "total": 380, + "total_time_in_millis": 51 + }, + "query_cache": { + "memory_size_in_bytes": 0, + "total_count": 0, + "hit_count": 0, + "miss_count": 0, + "cache_size": 0, + "cache_count": 0, + "evictions": 0 + }, + "fielddata": { + "memory_size_in_bytes": 772, + "evictions": 0 + }, + "completion": { + "size_in_bytes": 0 + }, + "segments": { + "count": 175, + "memory_in_bytes": 0, + "terms_memory_in_bytes": 0, + "stored_fields_memory_in_bytes": 0, + "term_vectors_memory_in_bytes": 0, + "norms_memory_in_bytes": 0, + "points_memory_in_bytes": 0, + "doc_values_memory_in_bytes": 0, + "index_writer_memory_in_bytes": 1040188, + "version_map_memory_in_bytes": 36920, + "fixed_bit_set_memory_in_bytes": 240, + "max_unsafe_auto_id_timestamp": 1705596946358, + "remote_store": { + "upload": { + "total_upload_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "refresh_size_lag": { + "total_bytes": 0, + "max_bytes": 0 + }, + "max_refresh_time_lag_in_millis": 0, + "total_time_spent_in_millis": 0 + }, + "download": { + "total_download_size": { + "started_bytes": 0, + "succeeded_bytes": 0, + "failed_bytes": 0 + }, + "total_time_spent_in_millis": 0 + } + }, + "segment_replication": { + "max_bytes_behind": "0b", + "total_bytes_behind": "0b", + "max_replication_lag": "0s" + }, + "file_sizes": {} + }, + "translog": { + "operations": 244573, + "size_in_bytes": 202510227, + "uncommitted_operations": 244573, + "uncommitted_size_in_bytes": 202510227, + "earliest_last_modified_age": 7034, + "remote_store": { + "upload": { + "total_uploads": { + "started": 0, + "failed": 0, + "succeeded": 0 + }, + "total_upload_size": { + "started_bytes": 0, + "failed_bytes": 0, + "succeeded_bytes": 0 + } + } + } + }, + "request_cache": { + "memory_size_in_bytes": 0, + "evictions": 0, + "hit_count": 0, + "miss_count": 1 + }, + "recovery": { + "current_as_source": 0, + "current_as_target": 0, + "throttle_time_in_millis": 0 + } + }, + "os": { + "timestamp": 1705681973267, + "cpu": { + "percent": 37, + "load_average": { + "1m": 26.4873046875 + } + }, + "mem": { + "total_in_bytes": 34359738368, + 
"free_in_bytes": 53592064, + "used_in_bytes": 34306146304, + "free_percent": 0, + "used_percent": 100 + }, + "swap": { + "total_in_bytes": 2147483648, + "free_in_bytes": 463994880, + "used_in_bytes": 1683488768 + } + }, + "process": { + "timestamp": 1705681973267, + "open_file_descriptors": 708, + "max_file_descriptors": 10240, + "cpu": { + "percent": 2, + "total_in_millis": 114466 + }, + "mem": { + "total_virtual_in_bytes": 430050213888 + } + }, + "jvm": { + "timestamp": 1705681973267, + "uptime_in_millis": 1273457, + "mem": { + "heap_used_in_bytes": 148181504, + "heap_used_percent": 13, + "heap_committed_in_bytes": 1073741824, + "heap_max_in_bytes": 1073741824, + "non_heap_used_in_bytes": 195711296, + "non_heap_committed_in_bytes": 200278016, + "pools": { + "young": { + "used_in_bytes": 32505856, + "max_in_bytes": 0, + "peak_used_in_bytes": 636485632, + "peak_max_in_bytes": 0, + "last_gc_stats": { + "used_in_bytes": 0, + "max_in_bytes": 0, + "usage_percent": -1 + } + }, + "old": { + "used_in_bytes": 108335616, + "max_in_bytes": 1073741824, + "peak_used_in_bytes": 108335616, + "peak_max_in_bytes": 1073741824, + "last_gc_stats": { + "used_in_bytes": 0, + "max_in_bytes": 1073741824, + "usage_percent": 0 + } + }, + "survivor": { + "used_in_bytes": 7340032, + "max_in_bytes": 0, + "peak_used_in_bytes": 55112576, + "peak_max_in_bytes": 0, + "last_gc_stats": { + "used_in_bytes": 7340032, + "max_in_bytes": 0, + "usage_percent": -1 + } + } + } + }, + "threads": { + "count": 112, + "peak_count": 129 + }, + "gc": { + "collectors": { + "young": { + "collection_count": 33, + "collection_time_in_millis": 456 + }, + "old": { + "collection_count": 0, + "collection_time_in_millis": 0 + } + } + }, + "buffer_pools": { + "mapped": { + "count": 403, + "used_in_bytes": 7339021456, + "total_capacity_in_bytes": 7339021456 + }, + "direct": { + "count": 82, + "used_in_bytes": 11889332, + "total_capacity_in_bytes": 11889331 + }, + "mapped - 'non-volatile memory'": { + "count": 0, + "used_in_bytes": 0, + "total_capacity_in_bytes": 0 + } + }, + "classes": { + "current_loaded_count": 23833, + "total_loaded_count": 23833, + "total_unloaded_count": 0 + } + }, + "thread_pool": { + "ad-batch-task-threadpool": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "ad-threadpool": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "analyze": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "fetch_shard_started": { + "threads": 1, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 20, + "completed": 63 + }, + "fetch_shard_store": { + "threads": 1, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 15, + "completed": 15 + }, + "flush": { + "threads": 1, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 5, + "completed": 39 + }, + "force_merge": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "generic": { + "threads": 20, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 20, + "completed": 6454 + }, + "get": { + "threads": 1, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 1, + "completed": 1 + }, + "listener": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "management": { + "threads": 5, + "queue": 0, + "active": 1, + "rejected": 0, + "largest": 5, + "completed": 6726 + }, + "open_distro_job_scheduler": { + 
"threads": 10, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 10, + "completed": 10 + }, + "opensearch_asynchronous_search_generic": { + "threads": 2, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 2, + "completed": 6 + }, + "opensearch_ml_deploy": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "opensearch_ml_execute": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "opensearch_ml_general": { + "threads": 9, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 9, + "completed": 126 + }, + "opensearch_ml_predict": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "opensearch_ml_register": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "opensearch_ml_train": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "refresh": { + "threads": 5, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 5, + "completed": 36172 + }, + "remote_purge": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "remote_recovery": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "remote_refresh_retry": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "replication_follower": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "replication_leader": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "search": { + "threads": 3, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 3, + "completed": 3, + "total_wait_time_in_nanos": 752917 + }, + "search_throttled": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0, + "total_wait_time_in_nanos": 0 + }, + "snapshot": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "system_read": { + "threads": 5, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 5, + "completed": 372 + }, + "system_write": { + "threads": 5, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 5, + "completed": 165 + }, + "translog_sync": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "translog_transfer": { + "threads": 0, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 0, + "completed": 0 + }, + "warmer": { + "threads": 5, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 5, + "completed": 1024 + }, + "write": { + "threads": 10, + "queue": 0, + "active": 0, + "rejected": 0, + "largest": 10, + "completed": 2043 + } + }, + "fs": { + "timestamp": 1705681973268, + "total": { + "total_in_bytes": 994662584320, + "free_in_bytes": 614376128512, + "available_in_bytes": 614376128512, + "cache_reserved_in_bytes": 0 + }, + "data": [ + { + "path": "/Users/oesterheld/dev/projects/graylog-project-master/graylog-project-repos/graylog2-server/data-node/data/datanode1/nodes/0", + "mount": "/System/Volumes/Data (/dev/disk3s5)", + "type": "apfs", + "total_in_bytes": 994662584320, + "free_in_bytes": 614376128512, + "available_in_bytes": 614376128512, + "cache_reserved_in_bytes": 0 + } + ] + }, + "transport": { + "server_open": 13, + "total_outbound_connections": 
1, + "rx_count": 12539, + "rx_size_in_bytes": 128502213, + "tx_count": 12539, + "tx_size_in_bytes": 132371515 + }, + "http": { + "current_open": 9, + "total_opened": 11 + }, + "breakers": { + "request": { + "limit_size_in_bytes": 644245094, + "limit_size": "614.3mb", + "estimated_size_in_bytes": 0, + "estimated_size": "0b", + "overhead": 1.0, + "tripped": 0 + }, + "fielddata": { + "limit_size_in_bytes": 429496729, + "limit_size": "409.5mb", + "estimated_size_in_bytes": 772, + "estimated_size": "772b", + "overhead": 1.03, + "tripped": 0 + }, + "in_flight_requests": { + "limit_size_in_bytes": 1073741824, + "limit_size": "1gb", + "estimated_size_in_bytes": 0, + "estimated_size": "0b", + "overhead": 2.0, + "tripped": 0 + }, + "parent": { + "limit_size_in_bytes": 1020054732, + "limit_size": "972.7mb", + "estimated_size_in_bytes": 148181504, + "estimated_size": "141.3mb", + "overhead": 1.0, + "tripped": 0 + } + }, + "script": { + "compilations": 1, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + "discovery": { + "cluster_state_queue": { + "total": 0, + "pending": 0, + "committed": 0 + }, + "published_cluster_states": { + "full_states": 2, + "incompatible_diffs": 0, + "compatible_diffs": 84 + } + }, + "ingest": { + "total": { + "count": 0, + "time_in_millis": 0, + "current": 0, + "failed": 0 + }, + "pipelines": {} + }, + "adaptive_selection": { + "qvCZ9yQ5S-OG8IKTnUdhpw": { + "outgoing_searches": 0, + "avg_queue_size": 0, + "avg_service_time_ns": 1433182, + "avg_response_time_ns": 3924883, + "rank": "3.9" + } + }, + "script_cache": { + "sum": { + "compilations": 1, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + "contexts": [ + { + "context": "aggregation_selector", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "aggs", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "aggs_combine", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "aggs_init", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "aggs_map", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "aggs_reduce", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "analysis", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "bucket_aggregation", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "field", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "filter", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "ingest", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "interval", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "moving-function", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "number_sort", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "painless_test", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "processor_conditional", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, 
+ { + "context": "score", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "script_heuristic", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "search", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "similarity", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "similarity_weight", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "string_sort", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "template", + "compilations": 1, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "terms_set", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + }, + { + "context": "update", + "compilations": 0, + "cache_evictions": 0, + "compilation_limit_triggered": 0 + } + ] + }, + "indexing_pressure": { + "memory": { + "current": { + "combined_coordinating_and_primary_in_bytes": 0, + "coordinating_in_bytes": 0, + "primary_in_bytes": 0, + "replica_in_bytes": 0, + "all_in_bytes": 0 + }, + "total": { + "combined_coordinating_and_primary_in_bytes": 360646972, + "coordinating_in_bytes": 236354572, + "primary_in_bytes": 245265065, + "replica_in_bytes": 0, + "all_in_bytes": 360646972, + "coordinating_rejections": 0, + "primary_rejections": 0, + "replica_rejections": 0 + }, + "limit_in_bytes": 107374182 + } + }, + "shard_indexing_pressure": { + "stats": {}, + "total_rejections_breakup_shadow_mode": { + "node_limits": 0, + "no_successful_request_limits": 0, + "throughput_degradation_limits": 0 + }, + "enabled": false, + "enforced": false + }, + "search_backpressure": { + "search_task": { + "resource_tracker_stats": { + "cpu_usage_tracker": { + "cancellation_count": 0, + "current_max_millis": 0, + "current_avg_millis": 0 + }, + "heap_usage_tracker": { + "cancellation_count": 0, + "current_max_bytes": 0, + "current_avg_bytes": 0, + "rolling_avg_bytes": 811 + }, + "elapsed_time_tracker": { + "cancellation_count": 0, + "current_max_millis": 0, + "current_avg_millis": 0 + } + }, + "cancellation_stats": { + "cancellation_count": 0, + "cancellation_limit_reached_count": 0 + } + }, + "search_shard_task": { + "resource_tracker_stats": { + "cpu_usage_tracker": { + "cancellation_count": 0, + "current_max_millis": 0, + "current_avg_millis": 0 + }, + "heap_usage_tracker": { + "cancellation_count": 0, + "current_max_bytes": 0, + "current_avg_bytes": 0, + "rolling_avg_bytes": 2259 + }, + "elapsed_time_tracker": { + "cancellation_count": 0, + "current_max_millis": 0, + "current_avg_millis": 0 + } + }, + "cancellation_stats": { + "cancellation_count": 0, + "cancellation_limit_reached_count": 0 + } + }, + "mode": "monitor_only" + }, + "cluster_manager_throttling": { + "stats": { + "total_throttled_tasks": 0, + "throttled_tasks_per_task_type": {} + } + }, + "weighted_routing": { + "stats": { + "fail_open_count": 0 + } + }, + "task_cancellation": { + "search_shard_task": { + "current_count_post_cancel": 0, + "total_count_post_cancel": 0 + } + }, + "search_pipeline": { + "total_request": { + "count": 0, + "time_in_millis": 0, + "current": 0, + "failed": 0 + }, + "total_response": { + "count": 0, + "time_in_millis": 0, + "current": 0, + "failed": 0 + }, + "pipelines": {} + } + } + } + } + """; + + +} diff --git 
a/data-node/src/test/java/org/graylog/datanode/opensearch/OpensearchProcessImplTest.java b/data-node/src/test/java/org/graylog/datanode/opensearch/OpensearchProcessImplTest.java new file mode 100644 index 000000000000..77914ef48a61 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/OpensearchProcessImplTest.java @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.eventbus.EventBus; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.DatanodeConfiguration; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.health.ClusterHealthResponse; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.settings.ClusterGetSettingsResponse; +import org.graylog.shaded.opensearch2.org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.graylog.shaded.opensearch2.org.opensearch.client.ClusterClient; +import org.graylog.shaded.opensearch2.org.opensearch.client.RequestOptions; +import org.graylog.shaded.opensearch2.org.opensearch.client.RestHighLevelClient; +import org.graylog.shaded.opensearch2.org.opensearch.common.settings.Settings; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.plugin.system.SimpleNodeId; +import org.graylog2.security.CustomCAX509TrustManager; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import java.io.IOException; +import java.util.Optional; +import java.util.concurrent.ScheduledExecutorService; + +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class OpensearchProcessImplTest { + + OpensearchProcessImpl opensearchProcess; + @Mock + private DatanodeConfiguration datanodeConfiguration; + @Mock + private CustomCAX509TrustManager trustmManager; + @Mock + private Configuration configuration; + @Mock + private ObjectMapper objectMapper; + @Mock + private OpensearchStateMachine processState; + private final String nodeName = "test-node"; + private final NodeId nodeId = new SimpleNodeId(nodeName); + @Mock + private EventBus eventBus; + + @Mock + RestHighLevelClient restClient; + @Mock + ClusterClient clusterClient; + + @Before + public void setup() throws IOException { + 
when(datanodeConfiguration.processLogsBufferSize()).thenReturn(100); + when(configuration.getDatanodeNodeName()).thenReturn(nodeName); + this.opensearchProcess = spy(new OpensearchProcessImpl(datanodeConfiguration, trustmManager, configuration, + objectMapper, processState, nodeId, eventBus)); + when(opensearchProcess.restClient()).thenReturn(Optional.of(restClient)); + when(restClient.cluster()).thenReturn(clusterClient); + } + + + @Test + public void testResetAllocation() throws IOException { + Settings settings = Settings.builder() + .put(OpensearchProcessImpl.CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING, nodeName) + .build(); + when(clusterClient.getSettings(any(), any())).thenReturn(new ClusterGetSettingsResponse(null, settings, null)); + opensearchProcess.available(); + + ArgumentCaptor settingsRequest = + ArgumentCaptor.forClass(ClusterUpdateSettingsRequest.class); + verify(clusterClient).putSettings(settingsRequest.capture(), eq(RequestOptions.DEFAULT)); + assertNull(settingsRequest.getValue() + .transientSettings() + .get(OpensearchProcessImpl.CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING) + ); + assertTrue(opensearchProcess.allocationExcludeChecked); + } + + @Test + public void testResetAllocationUnneccessary() throws IOException { + Settings settings = Settings.builder() + .put(OpensearchProcessImpl.CLUSTER_ROUTING_ALLOCATION_EXCLUDE_SETTING, "notmynodename") + .build(); + when(clusterClient.getSettings(any(), any())).thenReturn(new ClusterGetSettingsResponse(null, settings, null)); + opensearchProcess.available(); + verify(clusterClient).getSettings(any(), any()); + verifyNoMoreInteractions(clusterClient); + assertTrue(opensearchProcess.allocationExcludeChecked); + } + + @Test + public void testShutdownWhenRemovedSuccessfully() throws IOException { + ClusterHealthResponse health = mock(ClusterHealthResponse.class); + when(health.getRelocatingShards()).thenReturn(0); + when(clusterClient.health(any(), any())).thenReturn(health); + final ScheduledExecutorService executor = mock(ScheduledExecutorService.class); + opensearchProcess.executorService = executor; + opensearchProcess.checkRemovalStatus(); + verify(processState).fire(OpensearchEvent.PROCESS_STOPPED); + verify(executor).shutdown(); + } + +} diff --git a/data-node/src/test/java/org/graylog/datanode/opensearch/cli/OpensearchKeystoreCommandLineIT.java b/data-node/src/test/java/org/graylog/datanode/opensearch/cli/OpensearchKeystoreCommandLineIT.java new file mode 100644 index 000000000000..86bd4a36cdd6 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/cli/OpensearchKeystoreCommandLineIT.java @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.cli; + +import jakarta.annotation.Nonnull; +import org.assertj.core.api.Assertions; +import org.graylog.datanode.OpensearchDistribution; +import org.graylog.datanode.configuration.OpensearchArchitecture; +import org.graylog.datanode.configuration.OpensearchDistributionProvider; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.net.URISyntaxException; +import java.nio.file.Path; +import java.util.List; + +class OpensearchKeystoreCommandLineIT { + + @Test + void testKeystoreLifecycle(@TempDir Path tempDir) throws URISyntaxException { + final OpensearchCli cli = createCli(tempDir); + final String createdResponse = cli.keystore().create(); + + Assertions.assertThat(createdResponse).contains("Created opensearch keystore"); + + cli.keystore().add("s3.client.default.access_key", "foo"); + cli.keystore().add("s3.client.default.secret_key", "bar"); + + final List<String> response = cli.keystore().list(); + Assertions.assertThat(response) + .hasSize(3) // two keys and one internal seed + .contains("s3.client.default.access_key") + .contains("s3.client.default.secret_key"); + } + + private OpensearchCli createCli(Path tempDir) throws URISyntaxException { + final Path binDirPath = detectOpensearchBinDir(); + return new OpensearchCli(tempDir, binDirPath); + } + + @Nonnull + private Path detectOpensearchBinDir() throws URISyntaxException { + final Path opensearchDistRoot = Path.of(getClass().getResource("/").toURI()).getParent().resolve("opensearch"); + final OpensearchDistributionProvider distributionProvider = new OpensearchDistributionProvider(opensearchDistRoot, OpensearchArchitecture.fromOperatingSystem()); + final OpensearchDistribution opensearchDistribution = distributionProvider.get(); + return opensearchDistribution.getOpensearchBinDirPath(); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/opensearch/configuration/beans/DatanodeConfigurationPartTest.java b/data-node/src/test/java/org/graylog/datanode/opensearch/configuration/beans/DatanodeConfigurationPartTest.java new file mode 100644 index 000000000000..3b43c8114644 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/configuration/beans/DatanodeConfigurationPartTest.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * .
+ */ +package org.graylog.datanode.opensearch.configuration.beans; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.junit.jupiter.api.Test; + +import java.util.Collections; + +class DatanodeConfigurationPartTest { + + @Test + void testConfigBuild() { + final DatanodeConfigurationPart configurationPart = DatanodeConfigurationPart.builder() + .addNodeRole("cluster_manager") + .addNodeRole("data") + .addNodeRole("search") + .keystoreItems(Collections.singletonMap("foo", "bar")) + .properties(Collections.singletonMap("reindex.remote.allowlist", "localhost:9201")) + .systemProperty("file.encoding", "utf-8") + .systemProperty("java.home", "/jdk") + .build(); + + Assertions.assertThat(configurationPart.nodeRoles()) + .hasSize(3) + .contains("cluster_manager", "data", "search"); + + Assertions.assertThat(configurationPart.keystoreItems()) + .hasSize(1) + .containsEntry("foo", "bar"); + + Assertions.assertThat(configurationPart.properties()) + .hasSize(1) + .containsEntry("reindex.remote.allowlist", "localhost:9201"); + + Assertions.assertThat(configurationPart.systemProperties()) + .hasSize(2) + .containsEntry("file.encoding", "utf-8") + .containsEntry("java.home", "/jdk"); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/opensearch/configuration/beans/impl/SearchableSnapshotsConfigurationBeanTest.java b/data-node/src/test/java/org/graylog/datanode/opensearch/configuration/beans/impl/SearchableSnapshotsConfigurationBeanTest.java new file mode 100644 index 000000000000..6c3ac26d5d35 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/configuration/beans/impl/SearchableSnapshotsConfigurationBeanTest.java @@ -0,0 +1,184 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.configuration.beans.impl; + +import com.github.joschi.jadconfig.JadConfig; +import com.github.joschi.jadconfig.RepositoryException; +import com.github.joschi.jadconfig.ValidationException; +import com.github.joschi.jadconfig.repositories.InMemoryRepository; +import org.assertj.core.api.Assertions; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.configuration.OpensearchConfigurationException; +import org.graylog.datanode.configuration.S3RepositoryConfiguration; +import org.graylog.datanode.opensearch.configuration.OpensearchConfigurationParams; +import org.graylog.datanode.opensearch.configuration.OpensearchUsableSpace; +import org.graylog.datanode.process.configuration.beans.DatanodeConfigurationPart; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +class SearchableSnapshotsConfigurationBeanTest { + + @Test + void testS3Repo(@TempDir Path tempDir) throws ValidationException, RepositoryException { + final S3RepositoryConfiguration config = s3Configuration(Map.of( + "s3_client_default_access_key", "user", + "s3_client_default_secret_key", "password", + "s3_client_default_endpoint", "http://localhost:9000" + + )); + + final SearchableSnapshotsConfigurationBean bean = new SearchableSnapshotsConfigurationBean( + datanodeConfiguration(Map.of( + "node_search_cache_size", "10gb" + )), + config, + () -> new OpensearchUsableSpace(tempDir, 20L * 1024 * 1024 * 1024)); + + final DatanodeConfigurationPart configurationPart = bean.buildConfigurationPart(emptyBuildParams()); + + Assertions.assertThat(configurationPart.nodeRoles()) + .contains(SearchableSnapshotsConfigurationBean.SEARCH_NODE_ROLE); + + Assertions.assertThat(configurationPart.keystoreItems()) + .containsKeys("s3.client.default.access_key", "s3.client.default.secret_key"); + + Assertions.assertThat(configurationPart.properties()) + .containsKeys("s3.client.default.endpoint", "node.search.cache.size"); + } + + private OpensearchConfigurationParams emptyBuildParams() { + return new OpensearchConfigurationParams(Collections.emptyList(), Collections.emptyMap()); + } + + @Test + void testLocalFilesystemRepo(@TempDir Path tempDir) throws ValidationException, RepositoryException { + // no s3 repo configuration properties given by the user + final S3RepositoryConfiguration config = s3Configuration(Map.of()); + + // only path_repo in general datanode configuration + final SearchableSnapshotsConfigurationBean bean = new SearchableSnapshotsConfigurationBean( + datanodeConfiguration(Map.of( + "path_repo", "/mnt/data/snapshots", + "node_search_cache_size", "10gb" + )), + config, + () -> new OpensearchUsableSpace(tempDir, 20L * 1024 * 1024 * 1024)); + + final DatanodeConfigurationPart configurationPart = bean.buildConfigurationPart(emptyBuildParams()); + + Assertions.assertThat(configurationPart.nodeRoles()) + .contains(SearchableSnapshotsConfigurationBean.SEARCH_NODE_ROLE); + + Assertions.assertThat(configurationPart.keystoreItems()) + .isEmpty(); + + Assertions.assertThat(configurationPart.properties()) + .containsEntry("path.repo", "/mnt/data/snapshots") + .containsEntry("node.search.cache.size", "10gb"); + } + + @Test + void testNoSnapshotConfiguration(@TempDir Path tempDir) throws ValidationException, RepositoryException { + // no s3 repo configuration properties given by the user + final S3RepositoryConfiguration config = 
s3Configuration(Map.of()); + + // only path_repo in general datanode configuration + final SearchableSnapshotsConfigurationBean bean = new SearchableSnapshotsConfigurationBean( + datanodeConfiguration(Map.of( + "node_search_cache_size", "10gb" + )), + config, + () -> new OpensearchUsableSpace(tempDir, 20L * 1024 * 1024 * 1024)); + + final DatanodeConfigurationPart configurationPart = bean.buildConfigurationPart(emptyBuildParams()); + + Assertions.assertThat(configurationPart.nodeRoles()) + .isEmpty(); // no search role should be provided + + Assertions.assertThat(configurationPart.keystoreItems()) + .isEmpty(); + + Assertions.assertThat(configurationPart.properties()) + .isEmpty(); // no cache configuration should be provided + } + + @Test + void testCacheSizeValidation(@TempDir Path tempDir) throws ValidationException, RepositoryException { + final S3RepositoryConfiguration config = s3Configuration(Map.of( + "s3_client_default_access_key", "user", + "s3_client_default_secret_key", "password", + "s3_client_default_endpoint", "http://localhost:9000" + + )); + + final SearchableSnapshotsConfigurationBean bean = new SearchableSnapshotsConfigurationBean( + datanodeConfiguration(Map.of( + "node_search_cache_size", "10gb" + )), + config, + () -> new OpensearchUsableSpace(tempDir, 8L * 1024 * 1024 * 1024)); + + // 10GB cache requested on 8GB of free space, needs to throw an exception! + Assertions.assertThatThrownBy(() -> bean.buildConfigurationPart(emptyBuildParams())) + .isInstanceOf(OpensearchConfigurationException.class) + .hasMessageContaining("There is not enough usable space for the node search cache. Your system has only 8gb available"); + } + + @Test + void testRepoConfigWithoutSearchRole(@TempDir Path tempDir) throws ValidationException, RepositoryException { + final S3RepositoryConfiguration config = s3Configuration(Map.of()); + + // only path_repo in general datanode configuration + final SearchableSnapshotsConfigurationBean bean = new SearchableSnapshotsConfigurationBean( + datanodeConfiguration(Map.of( + "node_roles", "cluster_manager,data,ingest,remote_cluster_client", + "path_repo", "/mnt/data/snapshots", + "node_search_cache_size", "10gb" + )), + config, + () -> new OpensearchUsableSpace(tempDir, 20L * 1024 * 1024 * 1024)); + + final DatanodeConfigurationPart configurationPart = bean.buildConfigurationPart(emptyBuildParams()); + + Assertions.assertThat(configurationPart.nodeRoles()) + .isEmpty(); // no search role should be provided, we have to use only those that are given in the configuration + + Assertions.assertThat(configurationPart.properties()) + .containsEntry("path.repo", "/mnt/data/snapshots") + .doesNotContainEntry("node.search.cache.size", "10gb"); + } + + private S3RepositoryConfiguration s3Configuration(Map properties) throws RepositoryException, ValidationException { + final S3RepositoryConfiguration configuration = new S3RepositoryConfiguration(); + new JadConfig(new InMemoryRepository(properties), configuration).process(); + return configuration; + } + + private Configuration datanodeConfiguration(Map properties) throws RepositoryException, ValidationException { + final Configuration configuration = new Configuration(); + final InMemoryRepository mandatoryProps = new InMemoryRepository(Map.of( + "password_secret", "thisisverysecretpassword" + )); + new JadConfig(List.of(mandatoryProps, new InMemoryRepository(properties)), configuration).process(); + return configuration; + } +} diff --git 
a/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/FailuresCounterTest.java b/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/FailuresCounterTest.java new file mode 100644 index 000000000000..4decb6033165 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/FailuresCounterTest.java @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class FailuresCounterTest { + + @Test + void testCounter() { + final FailuresCounter failuresCounter = FailuresCounter.oneBased(3); + Assertions.assertThat(failuresCounter.failedTooManyTimes()).isFalse(); + failuresCounter.increment(); + failuresCounter.increment(); + Assertions.assertThat(failuresCounter.failedTooManyTimes()).isTrue(); + failuresCounter.resetFailuresCounter(); + Assertions.assertThat(failuresCounter.failedTooManyTimes()).isFalse(); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachineTest.java b/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachineTest.java new file mode 100644 index 000000000000..ef8af4747dcd --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/OpensearchStateMachineTest.java @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.opensearch.statemachine; + +import com.github.oxo42.stateless4j.StateMachine; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.tracer.StateMachineTracer; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.Set; + +@ExtendWith(MockitoExtension.class) +class OpensearchStateMachineTest { + + @Mock + OpensearchProcess opensearchProcess; + Set tracer = Set.of(); + + @Test + void testOptimalScenario() { + final StateMachine machine = OpensearchStateMachine.createNew(opensearchProcess, tracer); + Assertions.assertEquals(machine.getState(), OpensearchState.WAITING_FOR_CONFIGURATION); + + machine.fire(OpensearchEvent.PROCESS_PREPARED); + Assertions.assertEquals(OpensearchState.PREPARED, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STARTED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_OK); + Assertions.assertEquals(OpensearchState.AVAILABLE, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_TERMINATED); + Assertions.assertEquals(OpensearchState.TERMINATED, machine.getState()); + } + + @Test + void testRestFailing() { + final StateMachine machine = OpensearchStateMachine.createNew(opensearchProcess, tracer); + Assertions.assertEquals(machine.getState(), OpensearchState.WAITING_FOR_CONFIGURATION); + + machine.fire(OpensearchEvent.PROCESS_PREPARED); + Assertions.assertEquals(OpensearchState.PREPARED, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STARTED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_OK); + Assertions.assertEquals(OpensearchState.AVAILABLE, machine.getState()); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + + // three failures are still accepted + Assertions.assertEquals(OpensearchState.NOT_RESPONDING, machine.getState()); + + // fourth should bring the state to FAILED + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + Assertions.assertEquals(OpensearchState.FAILED, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_OK); + Assertions.assertEquals(OpensearchState.AVAILABLE, machine.getState()); + } + + @Test + void testStartupFailure() { + final StateMachine machine = OpensearchStateMachine.createNew(opensearchProcess, tracer); + Assertions.assertEquals(machine.getState(), OpensearchState.WAITING_FOR_CONFIGURATION); + + machine.fire(OpensearchEvent.PROCESS_PREPARED); + Assertions.assertEquals(OpensearchState.PREPARED, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STARTED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + // after five repetitions we give up waiting for the process start and fail + Assertions.assertEquals(OpensearchState.FAILED, machine.getState()); + } + + @Test + void testStartupFailureResolved() { + 
final StateMachine machine = OpensearchStateMachine.createNew(opensearchProcess, tracer); + Assertions.assertEquals(machine.getState(), OpensearchState.WAITING_FOR_CONFIGURATION); + + machine.fire(OpensearchEvent.PROCESS_PREPARED); + Assertions.assertEquals(OpensearchState.PREPARED, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STARTED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + machine.fire(OpensearchEvent.HEALTH_CHECK_OK); + // succeeded just in time before we give up + Assertions.assertEquals(OpensearchState.AVAILABLE, machine.getState()); + } + + @Test + void testSuccessfullRemoval() { + final StateMachine machine = OpensearchStateMachine.createNew(opensearchProcess, tracer); + Assertions.assertEquals(machine.getState(), OpensearchState.WAITING_FOR_CONFIGURATION); + + machine.fire(OpensearchEvent.PROCESS_PREPARED); + Assertions.assertEquals(OpensearchState.PREPARED, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STARTED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_OK); + Assertions.assertEquals(OpensearchState.AVAILABLE, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_REMOVE); + Assertions.assertEquals(OpensearchState.REMOVING, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STOPPED); + Assertions.assertEquals(OpensearchState.REMOVED, machine.getState()); + + } + + @Test + void testFailingRemoval() { + final StateMachine machine = OpensearchStateMachine.createNew(opensearchProcess, tracer); + Assertions.assertEquals(machine.getState(), OpensearchState.WAITING_FOR_CONFIGURATION); + + machine.fire(OpensearchEvent.PROCESS_PREPARED); + Assertions.assertEquals(OpensearchState.PREPARED, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_STARTED); + Assertions.assertEquals(OpensearchState.STARTING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_OK); + Assertions.assertEquals(OpensearchState.AVAILABLE, machine.getState()); + + machine.fire(OpensearchEvent.PROCESS_REMOVE); + Assertions.assertEquals(OpensearchState.REMOVING, machine.getState()); + + machine.fire(OpensearchEvent.HEALTH_CHECK_FAILED); + Assertions.assertEquals(OpensearchState.FAILED, machine.getState()); + + } + +} diff --git a/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/tracer/OpensearchWatchdogTracerTest.java b/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/tracer/OpensearchWatchdogTracerTest.java new file mode 100644 index 000000000000..1231209d4fc1 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/opensearch/statemachine/tracer/OpensearchWatchdogTracerTest.java @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.opensearch.statemachine.tracer; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.opensearch.OpensearchProcess; +import org.graylog.datanode.opensearch.statemachine.OpensearchEvent; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.datanode.opensearch.statemachine.OpensearchStateMachine; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.Set; + +@ExtendWith(MockitoExtension.class) +class OpensearchWatchdogTracerTest { + + @Mock + OpensearchProcess opensearchProcess; + + @Test + void testLifecycle() { + final OpensearchWatchdog watchdog = new OpensearchWatchdog(3); + OpensearchStateMachine stateMachine = OpensearchStateMachine.createNew(opensearchProcess, Set.of(watchdog)); + stateMachine.fire(OpensearchEvent.PROCESS_STARTED); + + // both process and watchdog are running now. Let's stop the process and see if the watchdog will restart it + terminateProcess(stateMachine); + + // see if the process is starting again + Assertions.assertThat(isInStartingState(stateMachine)).isTrue(); + + // repeat + terminateProcess(stateMachine); + Assertions.assertThat(isInStartingState(stateMachine)).isTrue(); + + terminateProcess(stateMachine); + Assertions.assertThat(isInStartingState(stateMachine)).isTrue(); + + // this is the 4th termination, we give up trying + terminateProcess(stateMachine); + + Assertions.assertThat(watchdog.isActive()).isFalse(); + Assertions.assertThat(stateMachine.getState()).isEqualTo(OpensearchState.TERMINATED); + } + + private void terminateProcess(OpensearchStateMachine stateMachine) { + stateMachine.fire(OpensearchEvent.PROCESS_TERMINATED); + } + + private boolean isInStartingState(OpensearchStateMachine stateMachine) { + return stateMachine.getState() == OpensearchState.STARTING; + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/periodicals/NodePingPeriodicalTest.java b/data-node/src/test/java/org/graylog/datanode/periodicals/NodePingPeriodicalTest.java new file mode 100644 index 000000000000..675516db7907 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/periodicals/NodePingPeriodicalTest.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.periodicals; + +import org.assertj.core.api.Assertions; +import org.graylog.datanode.Configuration; +import org.graylog.datanode.opensearch.statemachine.OpensearchState; +import org.graylog.testing.mongodb.MongoDBExtension; +import org.graylog.testing.mongodb.MongoDBTestService; +import org.graylog2.cluster.nodes.DataNodeClusterService; +import org.graylog2.cluster.nodes.DataNodeStatus; +import org.graylog2.plugin.system.SimpleNodeId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import java.net.URI; +import java.util.Date; +import java.util.List; + +@ExtendWith(MongoDBExtension.class) +class NodePingPeriodicalTest { + + private DataNodeClusterService nodeService; + + @BeforeEach + void setUp(MongoDBTestService mongodb) { + nodeService = new DataNodeClusterService(mongodb.mongoConnection(), new org.graylog2.Configuration()); + } + + @Test + void doRun() { + + final SimpleNodeId nodeID = new SimpleNodeId("5ca1ab1e-0000-4000-a000-000000000000"); + final URI uri = URI.create("http://localhost:9200"); + final String cluster = "localhost:9300"; + final String datanodeRestApi = "http://localhost:8999"; + @SuppressWarnings("unchecked") + + + + final NodePingPeriodical task = new NodePingPeriodical( + nodeService, + nodeID, + new Configuration(), + () -> uri, + () -> cluster, + () -> datanodeRestApi, + () -> OpensearchState.AVAILABLE, + Date::new, + () -> List.of("search", "ingest") + ); + + task.doRun(); + + Assertions.assertThat(nodeService.allActive().values()) + .hasSize(1) + .allSatisfy(nodeDto -> { + Assertions.assertThat(nodeDto.getTransportAddress()).isEqualTo("http://localhost:9200"); + Assertions.assertThat(nodeDto.getClusterAddress()).isEqualTo("localhost:9300"); + Assertions.assertThat(nodeDto.getDataNodeStatus()).isEqualTo(DataNodeStatus.AVAILABLE); + Assertions.assertThat(nodeDto.getNodeId()).isEqualTo("5ca1ab1e-0000-4000-a000-000000000000"); + Assertions.assertThat(nodeDto.getLastSeen()).isNotNull(); + Assertions.assertThat(nodeDto.getProvisioningInformation().certValidUntil()).isNotNull(); + Assertions.assertThat(nodeDto.getOpensearchRoles().containsAll(List.of("search", "ingest"))).isTrue(); + }); + } + +} diff --git a/data-node/src/test/java/org/graylog/datanode/process/CommandLineProcessTest.java b/data-node/src/test/java/org/graylog/datanode/process/CommandLineProcessTest.java new file mode 100644 index 000000000000..f0f7808986fa --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/process/CommandLineProcessTest.java @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.process; + +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.Retryer; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import org.apache.commons.exec.ExecuteException; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +class CommandLineProcessTest { + + private static final Logger LOG = LoggerFactory.getLogger(CommandLineProcessTest.class); + private Path binPath; + + @BeforeEach + void setUp() throws URISyntaxException, IOException { + final URL bin = getClass().getResource("test-script.sh"); + assert bin != null; + binPath = Path.of(bin.toURI()); + + // make sure that the binary has the correct permissions before we try to run it + Files.setPosixFilePermissions(binPath, PosixFilePermissions.fromString("rwxr-xr-x")); + } + + @Test + void testManualStop() throws IOException, ExecutionException, RetryException { + List stdout = new LinkedList<>(); + List stdErr = new LinkedList<>(); + + final ProcessListener listener = new ProcessListener() { + @Override + public void onStart() { + } + + @Override + public void onStdOut(String line) { + LOG.info("Stdout:" + line); + stdout.add(line); + } + + @Override + public void onStdErr(String line) { + LOG.info("Stderr:" + line); + stdErr.add(line); + } + + @Override + public void onProcessComplete(int exitValue) { + LOG.info("On process complete:" + exitValue); + } + + @Override + public void onProcessFailed(ExecuteException e) { + LOG.info("On process failed:", e); + } + }; + final CommandLineProcess process = new CommandLineProcess( + binPath, + Collections.emptyList(), + listener, + new Environment(Map.of("USER", "test", "JAVA_HOME", "/path/to/jre")) + ); + process.start(); + + waitTillLogsAreAvailable(stdout, 3); + waitTillLogsAreAvailable(stdErr, 1); + + // if the lines are there, it switches to infinite loop. 
We'll have to terminate it + process.stop(); + + Assertions.assertThat(stdout) + .hasSize(3) + .containsExactlyInAnyOrder("Hello World", "second line", "third line"); + + Assertions.assertThat(stdErr) + .hasSize(1) + .contains("This message goes to stderr"); + + } + + private void waitTillLogsAreAvailable(List<String> logs, int expectedLinesCount) throws ExecutionException, RetryException { + final Retryer<List<String>> retryer = RetryerBuilder.<List<String>>newBuilder() + .withWaitStrategy(WaitStrategies.fixedWait(100, TimeUnit.MILLISECONDS)) + .withStopStrategy(StopStrategies.stopAfterAttempt(20)) + .retryIfResult((res) -> res.size() < expectedLinesCount) + .build(); + + + retryer.call(() -> logs); + } + + @Test + void testExitCode() throws ExecutionException, InterruptedException, TimeoutException { + final CompletableFuture<Integer> exitCodeFuture = new CompletableFuture<>(); + + final ProcessListener listener = new ProcessListener() { + @Override + public void onStart() { + } + + @Override + public void onStdOut(String line) { + LOG.info("Stdout:" + line); + } + + @Override + public void onStdErr(String line) { + LOG.info("Stderr:" + line); + } + + @Override + public void onProcessComplete(int exitValue) { + exitCodeFuture.complete(exitValue); + } + + @Override + public void onProcessFailed(ExecuteException e) { + exitCodeFuture.complete(e.getExitValue()); + } + }; + final CommandLineProcess process = new CommandLineProcess(binPath, List.of("143"), listener, new Environment(System.getenv())); + process.start(); + + final Integer exitCode = exitCodeFuture.get(10, TimeUnit.SECONDS); + + Assertions.assertThat(exitCode).isEqualTo(143); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/process/EnvironmentTest.java b/data-node/src/test/java/org/graylog/datanode/process/EnvironmentTest.java new file mode 100644 index 000000000000..5bc742cdf0c2 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/process/EnvironmentTest.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Map; + +class EnvironmentTest { + + @Test + void testFiltering() { + final Environment env = new Environment(Map.of("USER", "test", "JAVA_HOME", "/path/to/jre")); + Assertions.assertThat(env.getEnv()) + .doesNotContainKey("JAVA_HOME") + .containsKey("USER"); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/process/LoggingOutputStreamTest.java b/data-node/src/test/java/org/graylog/datanode/process/LoggingOutputStreamTest.java new file mode 100644 index 000000000000..29a0c5da9d33 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/process/LoggingOutputStreamTest.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc.
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.process; + +import org.apache.commons.exec.StreamPumper; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.LinkedList; +import java.util.List; + +class LoggingOutputStreamTest { + @Test + void testConsumer() { + + final List<String> lines = new LinkedList<>(); + final LoggingOutputStream loggingOutputStream = new LoggingOutputStream(lines::add); + + String text = """ + first line + second line + third line + """; + final ByteArrayInputStream source = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8)); + + final StreamPumper pumper = new StreamPumper(source, loggingOutputStream); + pumper.run(); + + Assertions.assertThat(lines) + .hasSize(3) + .contains("first line", "second line", "third line"); + + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/rest/CertificatesControllerTest.java b/data-node/src/test/java/org/graylog/datanode/rest/CertificatesControllerTest.java new file mode 100644 index 000000000000..b59a8c666c3c --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/rest/CertificatesControllerTest.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * .
+ */ +package org.graylog.datanode.rest; + +import org.graylog.datanode.configuration.DatanodeKeystore; +import org.graylog.datanode.configuration.OpensearchKeystoreProvider; +import org.graylog.security.certutil.CertRequest; +import org.graylog.security.certutil.CertificateGenerator; +import org.graylog.security.certutil.KeyPair; +import org.graylog.security.certutil.KeyStoreDto; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.security.KeyStore; +import java.security.cert.Certificate; +import java.time.Duration; +import java.util.Map; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog.security.certutil.CertConstants.PKCS12; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class CertificatesControllerTest { + + @Mock + private DatanodeKeystore datanodeKeystore; + + + private CertificatesController certificatesController; + + @BeforeEach + public void setup() { + certificatesController = new CertificatesController(datanodeKeystore, Map.of( + OpensearchKeystoreProvider.Store.TRUSTSTORE, KeyStoreDto.empty(), + OpensearchKeystoreProvider.Store.HTTP, KeyStoreDto.empty(), + OpensearchKeystoreProvider.Store.TRANSPORT, KeyStoreDto.empty()) + ); + } + + @Test + public void testOptionalSecurityConfiguration() throws Exception { + when(datanodeKeystore.loadKeystore()).thenReturn(testKeyStore()); + Map<OpensearchKeystoreProvider.Store, KeyStoreDto> certificates = certificatesController.getCertificates(); + assertThat(certificates).hasSize(4); + assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates()).hasSize(3); + assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates().get("ca")).hasSize(1); + assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates().get("host")).hasSize(2); + assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates().get("cert")).hasSize(1); + assertThat(certificates.get(OpensearchKeystoreProvider.Store.TRUSTSTORE).certificates()).hasSize(0); + } + + private KeyStore testKeyStore() throws Exception { + char[] pass = "dummy".toCharArray(); + KeyStore keystore = KeyStore.getInstance(PKCS12); + keystore.load(null, null); + final CertRequest certRequest = CertRequest.selfSigned("ca") + .isCA(true) + .validity(Duration.ofDays(1)); + KeyPair ca = CertificateGenerator.generate(certRequest); + final CertRequest certRequest2 = CertRequest.signed("host", ca) + .isCA(false) + .withSubjectAlternativeName("altname") + .validity(Duration.ofDays(1)); + KeyPair host = CertificateGenerator.generate(certRequest2); + keystore.setKeyEntry("ca", ca.privateKey(), pass, new Certificate[]{ca.certificate()}); + keystore.setKeyEntry("host", host.privateKey(), pass, new Certificate[]{host.certificate(), ca.certificate()}); + keystore.setCertificateEntry("cert", ca.certificate()); + return keystore; + } + +} diff --git a/data-node/src/test/java/org/graylog/datanode/rest/OpensearchLockCheckControllerTest.java b/data-node/src/test/java/org/graylog/datanode/rest/OpensearchLockCheckControllerTest.java new file mode 100644 index 000000000000..62213ca13110 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/rest/OpensearchLockCheckControllerTest.java @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2020 Graylog, Inc.
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.rest; + +import jakarta.annotation.Nonnull; +import org.assertj.core.api.Assertions; +import org.graylog.plugins.views.storage.migration.state.actions.OpensearchLockCheckResult; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; + +class OpensearchLockCheckControllerTest { + @Test + void testNotLockedDir(@TempDir Path tempDir) throws IOException { + final OpensearchLockCheckController controller = new OpensearchLockCheckController(tempDir); + createLockFile(tempDir); + final OpensearchLockCheckResult result = controller.checkLockFiles(); + Assertions.assertThat(result.locks()) + .hasSize(1) + .allSatisfy(l -> Assertions.assertThat(l.locked()).isFalse()); + } + + @Test + void testLockedDir(@TempDir Path tempDir) throws IOException { + final OpensearchLockCheckController controller = new OpensearchLockCheckController(tempDir); + final Path lockFile = createLockFile(tempDir); + lock(lockFile); + final OpensearchLockCheckResult result = controller.checkLockFiles(); + Assertions.assertThat(result.locks()) + .hasSize(1) + .allSatisfy(l -> Assertions.assertThat(l.locked()).isTrue()); + } + + @Test + void testEmptyDir(@TempDir Path tempDir) { + final OpensearchLockCheckController controller = new OpensearchLockCheckController(tempDir); + final OpensearchLockCheckResult result = controller.checkLockFiles(); + Assertions.assertThat(result.locks()) + .isEmpty(); + } + + @Nonnull + private static Path createLockFile(Path tempDir) throws IOException { + final Path nodeDir = tempDir.resolve("nodes").resolve("0"); + Files.createDirectories(nodeDir); + final Path lockFile = nodeDir.resolve("node.lock"); + Files.createFile(lockFile); + return lockFile; + } + + private FileLock lock(Path lockFile) throws IOException { + FileChannel channel = FileChannel.open(lockFile, StandardOpenOption.WRITE); + return channel.lock(); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeContainerizedBackend.java b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeContainerizedBackend.java new file mode 100644 index 000000000000..4a37686ca304 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeContainerizedBackend.java @@ -0,0 +1,139 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.testinfra; + +import com.github.joschi.jadconfig.util.Duration; +import jakarta.inject.Provider; +import org.graylog.testing.completebackend.ContainerizedGraylogBackend; +import org.graylog.testing.completebackend.DefaultMavenProjectDirProvider; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.datanode.DatanodeDockerHooks; +import org.graylog.testing.graylognode.MavenPackager; +import org.graylog.testing.mongodb.MongoDBTestService; +import org.graylog2.security.IndexerJwtAuthTokenProvider; +import org.graylog2.security.JwtSecret; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; + +public class DatanodeContainerizedBackend { + public static final String IMAGE_WORKING_DIR = "/usr/share/graylog/datanode"; + static public final String SIGNING_SECRET = ContainerizedGraylogBackend.PASSWORD_SECRET; + + public static final Provider JWT_AUTH_TOKEN_PROVIDER = new IndexerJwtAuthTokenProvider(new JwtSecret(SIGNING_SECRET), Duration.seconds(120), Duration.seconds(60)); + + public static final int DATANODE_REST_PORT = 8999; + public static final int DATANODE_OPENSEARCH_HTTP_PORT = 9200; + public static final int DATANODE_OPENSEARCH_TRANSPORT_PORT = 9300; + + private final Network network; + private boolean shouldCloseNetwork = false; + private final MongoDBTestService mongoDBTestService; + private boolean shouldCloseMongodb = false; + private final GenericContainer datanodeContainer; + private String nodeName; + + public DatanodeContainerizedBackend() { + this(new DatanodeDockerHooksAdapter()); + } + + public DatanodeContainerizedBackend(DatanodeDockerHooks hooks) { + this("node1", hooks); + } + + public DatanodeContainerizedBackend(final String nodeName, DatanodeDockerHooks hooks) { + + this.network = Network.newNetwork(); + this.mongoDBTestService = MongoDBTestService.create(MongodbServer.DEFAULT_VERSION, this.network); + this.mongoDBTestService.start(); + + // we have created these resources, we have to close them. 
+ this.shouldCloseNetwork = true; + this.shouldCloseMongodb = true; + + this.datanodeContainer = createDatanodeContainer( + nodeName, + hooks); + } + + public DatanodeContainerizedBackend(Network network, MongoDBTestService mongoDBTestService, String nodeName, DatanodeDockerHooks hooks) { + this.network = network; + this.mongoDBTestService = mongoDBTestService; + this.datanodeContainer = createDatanodeContainer( + nodeName, + hooks); + } + + private GenericContainer createDatanodeContainer(String nodeName, DatanodeDockerHooks customizer) { + this.nodeName = nodeName; + MavenPackager.packageJarIfNecessary(new DefaultMavenProjectDirProvider()); + + return new DatanodeDevContainerBuilder() + .restPort(DATANODE_REST_PORT) + .openSearchHttpPort(DATANODE_OPENSEARCH_HTTP_PORT) + .openSearchTransportPort(DATANODE_OPENSEARCH_TRANSPORT_PORT) + .mongoDbUri(mongoDBTestService.internalUri()) + .nodeName(nodeName) + .network(network) + .passwordSecret(ContainerizedGraylogBackend.PASSWORD_SECRET) + .customizer(customizer) + .build(); + } + + public DatanodeContainerizedBackend start() { + datanodeContainer.start(); + return this; + } + + public void stop() { + datanodeContainer.stop(); + if (shouldCloseMongodb) { + mongoDBTestService.close(); + } + if (shouldCloseNetwork) { + network.close(); + } + } + + public Network getNetwork() { + return network; + } + + public MongoDBTestService getMongoDb() { + return mongoDBTestService; + } + + public String getLogs() { + return datanodeContainer.getLogs(); + } + + public Integer getDatanodeRestPort() { + return datanodeContainer.getMappedPort(DATANODE_REST_PORT); + } + + + public Integer getOpensearchRestPort() { + return datanodeContainer.getMappedPort(DATANODE_OPENSEARCH_HTTP_PORT); + } + + public Integer getOpensearchTransportPort() { + return datanodeContainer.getMappedPort(DATANODE_OPENSEARCH_TRANSPORT_PORT); + } + + public String getNodeName() { + return nodeName; + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDevContainerBuilder.java b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDevContainerBuilder.java new file mode 100644 index 000000000000..af44118d184b --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDevContainerBuilder.java @@ -0,0 +1,232 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.testinfra; + +import com.google.common.base.Suppliers; +import org.graylog.datanode.configuration.OpensearchArchitecture; +import org.graylog.testing.datanode.DatanodeDockerHooks; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.BindMode; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.images.builder.ImageFromDockerfile; +import org.testcontainers.images.builder.dockerfile.DockerfileBuilder; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import java.util.function.Supplier; + +import static org.graylog.datanode.testinfra.DatanodeContainerizedBackend.IMAGE_WORKING_DIR; +import static org.graylog.testing.completebackend.DefaultPluginJarsProvider.getProjectReposPath; +import static org.graylog.testing.completebackend.DefaultPluginJarsProvider.getProjectVersion; + +public class DatanodeDevContainerBuilder implements org.graylog.testing.datanode.DatanodeDevContainerBuilder { + private static final Logger LOG = LoggerFactory.getLogger(DatanodeDevContainerBuilder.class); + private static final Supplier<ImageFromDockerfile> imageSupplier = Suppliers.memoize(DatanodeDevContainerBuilder::createImage); + public static final String ENV_INSECURE_STARTUP = "GRAYLOG_DATANODE_INSECURE_STARTUP"; + + private String rootUsername = "admin"; + private String passwordSecret; + private String mongoDbUri; + private int restPort = 8999; + private int openSearchHttpPort = 9200; + private int openSearchTransportPort = 9300; + private String nodeName = "node1"; + private Optional<DatanodeDockerHooks> customizer = Optional.empty(); + private Network network; + private Map<String, String> env; + + protected static Path getPath() { + return getProjectReposPath().resolve(Path.of("graylog2-server", "data-node", "target")); + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder mongoDbUri(final String mongoDbUri) { + this.mongoDbUri = mongoDbUri; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder passwordSecret(final String passwordSecret) { + this.passwordSecret = passwordSecret; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder rootUsername(final String rootUsername) { + this.rootUsername = rootUsername; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder restPort(final int restPort) { + this.restPort = restPort; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder openSearchHttpPort(final int openSearchHttpPort) { + this.openSearchHttpPort = openSearchHttpPort; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder openSearchTransportPort(final int openSearchTransportPort) { + this.openSearchTransportPort = openSearchTransportPort; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder nodeName(final String nodeName) { + this.nodeName = nodeName; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder customizer(final DatanodeDockerHooks hooks) { + this.customizer = Optional.of(hooks); + return this; + } + + @Override + public
org.graylog.testing.datanode.DatanodeDevContainerBuilder network(final Network network) { + this.network = network; + return this; + } + + @Override + public org.graylog.testing.datanode.DatanodeDevContainerBuilder env(Map env) { + this.env = env; + return this; + } + + public GenericContainer build() { + final Path graylog = getPath().resolve("graylog-datanode-" + getProjectVersion() + ".jar"); + if (!Files.exists(graylog)) { + LOG.info("Searching for {} failed.", graylog.toAbsolutePath()); + LOG.info("Project repos path: {}, absolute path: {}", getProjectReposPath(), getProjectReposPath().toAbsolutePath()); + if (Files.exists(getPath())) { + LOG.info("contents of base path {}:", getPath()); + try (var files = Files.list(getPath())) { + files.forEach(file -> LOG.info("{}", file.toString())); + } catch (IOException ex) { + LOG.info("listing files failed with exception: {}", ex.getMessage()); + } + } else { + LOG.info("Base path {} does not exist.", getPath()); + } + throw new RuntimeException("Failed to link graylog-datanode.jar to the datanode docker image, path " + graylog.toAbsolutePath() + " does not exist! Basepath, it was resolved from is: " + getProjectReposPath()); + } + + GenericContainer container = new GenericContainer<>(imageSupplier.get()) + .withExposedPorts(restPort, openSearchHttpPort) + .withNetwork(network) + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_LOCATION", IMAGE_WORKING_DIR) + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_PLUGINS_LOCATION", IMAGE_WORKING_DIR + "/plugins") + .withEnv(ENV_INSECURE_STARTUP, "true") + .withEnv("GRAYLOG_DATANODE_CONFIG_LOCATION", IMAGE_WORKING_DIR + "/config") // this is the datanode config dir for certs + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_DATA_LOCATION", IMAGE_WORKING_DIR + "/opensearch/data") + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_LOGS_LOCATION", IMAGE_WORKING_DIR + "/opensearch/logs") + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_CONFIG_LOCATION", IMAGE_WORKING_DIR + "/opensearch/config") + + .withEnv("GRAYLOG_DATANODE_MONGODB_URI", mongoDbUri) + .withEnv("GRAYLOG_DATANODE_NODE_NAME", nodeName) + + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_HTTP_PORT", "" + openSearchHttpPort) + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_TRANSPORT_PORT", "" + openSearchTransportPort) + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_DISCOVERY_SEED_HOSTS", nodeName + ":" + openSearchTransportPort) + + .withEnv("GRAYLOG_DATANODE_OPENSEARCH_NETWORK_HOST", nodeName) + .withEnv("GRAYLOG_DATANODE_INITIAL_CLUSTER_MANAGER_NODES", nodeName) + + .withEnv("GRAYLOG_DATANODE_ROOT_USERNAME", rootUsername) + .withEnv("GRAYLOG_DATANODE_PASSWORD_SECRET", passwordSecret) + + .withEnv("GRAYLOG_DATANODE_NODE_ID_FILE", "./node-id") + .withEnv("GRAYLOG_DATANODE_HTTP_BIND_ADDRESS", "0.0.0.0:" + restPort) + + // disable disk threshold in tests, it causes problems in github builds where we don't have + // enough free space + .withEnv("opensearch.cluster.routing.allocation.disk.threshold_enabled", "false") + + .withNetworkAliases(nodeName) + .waitingFor(new LogMessageWaitStrategy() + .withRegEx(".*Graylog DataNode datanode up and running.\n") + .withStartupTimeout(Duration.ofSeconds(60))); + + // explicitly configured ENV variables will override those set above + if(env != null) { + env.forEach(container::withEnv); + } + + final String opensearchDistributionName = "opensearch-" + getOpensearchVersion() + "-linux-" + OpensearchArchitecture.fromOperatingSystem(); + final Path downloadedOpensearch = getPath().resolve(Path.of("opensearch", opensearchDistributionName)); + + if (!Files.exists(downloadedOpensearch)) 
{ + throw new RuntimeException("Failed to link opensearch distribution to the datanode docker image, path " + downloadedOpensearch.toAbsolutePath() + " does not exist!"); + } + + container.withFileSystemBind(graylog.toString(), IMAGE_WORKING_DIR + "/graylog-datanode.jar") + .withFileSystemBind(getPath().resolve("lib").toString(), IMAGE_WORKING_DIR + "/lib/") + .withFileSystemBind(downloadedOpensearch.toString(), IMAGE_WORKING_DIR + "/" + opensearchDistributionName, BindMode.READ_ONLY); + + customizer.ifPresent(c -> c.onContainer(container)); + return container; + } + + private static String getOpensearchVersion() { + try { + final Properties props = new Properties(); + props.load(DatanodeContainerizedBackend.class.getResourceAsStream("/opensearch.properties")); + return props.getProperty("opensearchVersion"); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static ImageFromDockerfile createImage() { + + final ImageFromDockerfile image = new ImageFromDockerfile("local/graylog-datanode:latest", false); + + return image.withDockerfileFromBuilder(builder -> + { + final DockerfileBuilder fileBuilder = builder.from("eclipse-temurin:17-jre-jammy") + .workDir(IMAGE_WORKING_DIR) + .run("mkdir -p opensearch/config") + .run("mkdir -p opensearch/data") + .run("mkdir -p opensearch/logs") + .run("mkdir -p config") + .run("mkdir -p plugins"); + + + fileBuilder.run("touch datanode.conf") // create empty configuration file, required but all config comes via env props + .run("useradd opensearch") + .run("chown -R opensearch:opensearch " + IMAGE_WORKING_DIR) + .user("opensearch") + .expose(DatanodeContainerizedBackend.DATANODE_REST_PORT, DatanodeContainerizedBackend.DATANODE_OPENSEARCH_HTTP_PORT) + .entryPoint("java", "-jar", "graylog-datanode.jar", "datanode", "-f", "datanode.conf"); + builder.build(); + }); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDevContainerProvider.java b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDevContainerProvider.java new file mode 100644 index 000000000000..db41da1d5040 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDevContainerProvider.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.testinfra; + +import org.graylog.testing.datanode.DatanodeDevContainerInterfaceProvider; +import org.graylog2.storage.SearchVersion; + +public class DatanodeDevContainerProvider implements DatanodeDevContainerInterfaceProvider { + @Override + public DatanodeDevContainerBuilder + getBuilderFor(SearchVersion version) { + return new DatanodeDevContainerBuilder(); + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDockerHooksAdapter.java b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDockerHooksAdapter.java new file mode 100644 index 000000000000..f6a8a6761658 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeDockerHooksAdapter.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.datanode.testinfra; + +import org.graylog.testing.datanode.DatanodeDockerHooks; +import org.testcontainers.containers.GenericContainer; + +public class DatanodeDockerHooksAdapter implements DatanodeDockerHooks { + + @Override + public void onContainer(GenericContainer datanodeContainer) { + + } +} diff --git a/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeTestExtension.java b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeTestExtension.java new file mode 100644 index 000000000000..ac4537b7a289 --- /dev/null +++ b/data-node/src/test/java/org/graylog/datanode/testinfra/DatanodeTestExtension.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.datanode.testinfra; + +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; + +import java.io.IOException; + +public class DatanodeTestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback { + + private final DatanodeContainerizedBackend datanodeBackend; + + public DatanodeTestExtension() { + this.datanodeBackend = new DatanodeContainerizedBackend(); + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException { + return parameterContext.getParameter().getParameterizedType().equals(DatanodeContainerizedBackend.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException { + if (parameterContext.getParameter().getParameterizedType().equals(DatanodeContainerizedBackend.class)) { + return this.datanodeBackend; + } else { + throw new IllegalArgumentException("Unsupported parameter " + parameterContext.getParameter().getParameterizedType()); + } + } + + @Override + public void beforeAll(ExtensionContext extensionContext) throws IOException { + //TODO: trigger packaging: MavenPackager + datanodeBackend.start(); + } + + @Override + public void afterAll(ExtensionContext extensionContext) throws Exception { + datanodeBackend.stop(); + } +} diff --git a/data-node/src/test/resources/META-INF/services/org.graylog.testing.datanode.DatanodeDevContainerInterfaceProvider b/data-node/src/test/resources/META-INF/services/org.graylog.testing.datanode.DatanodeDevContainerInterfaceProvider new file mode 100644 index 000000000000..53c7b7265748 --- /dev/null +++ b/data-node/src/test/resources/META-INF/services/org.graylog.testing.datanode.DatanodeDevContainerInterfaceProvider @@ -0,0 +1 @@ +org.graylog.datanode.testinfra.DatanodeDevContainerProvider diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/_state/global-13.st b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/_state/global-13.st new file mode 100644 index 000000000000..6190b5b1f552 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/_state/global-13.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/_state/node-0.st b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/_state/node-0.st new file mode 100644 index 000000000000..263ab8454785 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/_state/node-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/_state/retention-leases-0.st b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/_state/retention-leases-0.st new file mode 100644 index 000000000000..138f762216c0 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/_state/retention-leases-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/_state/state-0.st 
b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/_state/state-0.st new file mode 100644 index 000000000000..a48ce514a48d Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/_state/state-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.cfe b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.cfe new file mode 100644 index 000000000000..5b2ae8f6b2b3 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.cfe differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.cfs b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.cfs new file mode 100644 index 000000000000..86cbcb2ca5c6 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.cfs differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.si b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.si new file mode 100644 index 000000000000..03872aa3accc Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/_0.si differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/segments_3 b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/segments_3 new file mode 100644 index 000000000000..574e206b2631 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/segments_3 differ diff --git a/integration-tests/src/test/resources/integration/seeds/mongodb/graylog/notifications.bson b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/write.lock similarity index 100% rename from integration-tests/src/test/resources/integration/seeds/mongodb/graylog/notifications.bson rename to data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/index/write.lock diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-1.ckp b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-1.ckp new file mode 100644 index 000000000000..2cc89162118a Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-1.ckp differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-1.tlog b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-1.tlog new file mode 100644 index 000000000000..21a4d288cb2f Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-1.tlog differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-2.ckp 
b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-2.ckp new file mode 100644 index 000000000000..763aa32bffbc Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-2.ckp differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-2.tlog b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-2.tlog new file mode 100644 index 000000000000..fb25e04aa222 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-2.tlog differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-3.tlog new file mode 100644 index 000000000000..21a4d288cb2f Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog.ckp b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog.ckp new file mode 100644 index 000000000000..04e74a9a7255 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/_state/state-2.st b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/_state/state-2.st new file mode 100644 index 000000000000..643c741bf8ac Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/indices/MHXMFgomR8iU-44k_aLZtw/_state/state-2.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch6/nodes/0/node.lock b/data-node/src/test/resources/indices/elasticsearch6/nodes/0/node.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.cfe b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.cfe new file mode 100644 index 000000000000..a376acdb81f7 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.cfe differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.cfs b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.cfs new file mode 100644 index 000000000000..54032a0e7aff Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.cfs differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.si b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.si new file mode 100644 index 000000000000..a4e78e6f0c99 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_13.si differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.cfe b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.cfe new file mode 100644 index 000000000000..c5d0dc54387b Binary files 
/dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.cfe differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.cfs b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.cfs new file mode 100644 index 000000000000..3964fe2c5d55 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.cfs differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.si b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.si new file mode 100644 index 000000000000..d10676769643 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/_16.si differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/manifest-0.st b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/manifest-0.st new file mode 100644 index 000000000000..030f9cbe09ea Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/manifest-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/node-0.st b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/node-0.st new file mode 100644 index 000000000000..97a62c2567d1 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/node-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/segments_1a b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/segments_1a new file mode 100644 index 000000000000..94718a9d1dee Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/segments_1a differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/write.lock b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/_state/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/_state/retention-leases-0.st b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/_state/retention-leases-0.st new file mode 100644 index 000000000000..138f762216c0 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/_state/retention-leases-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/_state/state-0.st b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/_state/state-0.st new file mode 100644 index 000000000000..f2f72d6fc077 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/_state/state-0.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.cfe b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.cfe new file mode 100644 index 000000000000..7aee8aba27a5 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.cfe differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.cfs 
b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.cfs new file mode 100644 index 000000000000..cd5fd87fe82f Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.cfs differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.si b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.si new file mode 100644 index 000000000000..c2fa97631646 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/_0.si differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/segments_3 b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/segments_3 new file mode 100644 index 000000000000..de7aeb6ece39 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/segments_3 differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/write.lock b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/translog/translog-3.tlog new file mode 100644 index 000000000000..2a7a8980e5e2 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/translog/translog.ckp b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/translog/translog.ckp new file mode 100644 index 000000000000..328ea2816ed9 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/_state/state-2.st b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/_state/state-2.st new file mode 100644 index 000000000000..d46a3ed15608 Binary files /dev/null and b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/indices/JwZYQzvUQG6JxLBgMsFfTA/_state/state-2.st differ diff --git a/data-node/src/test/resources/indices/elasticsearch7/nodes/0/node.lock b/data-node/src/test/resources/indices/elasticsearch7/nodes/0/node.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch1/batch_metrics_enabled.conf b/data-node/src/test/resources/indices/opensearch1/batch_metrics_enabled.conf new file mode 100644 index 000000000000..02e4a84d62c4 --- /dev/null +++ b/data-node/src/test/resources/indices/opensearch1/batch_metrics_enabled.conf @@ -0,0 +1 @@ +false \ No newline at end of file diff --git a/data-node/src/test/resources/indices/opensearch1/logging_enabled.conf b/data-node/src/test/resources/indices/opensearch1/logging_enabled.conf new file mode 100644 index 
000000000000..02e4a84d62c4 --- /dev/null +++ b/data-node/src/test/resources/indices/opensearch1/logging_enabled.conf @@ -0,0 +1 @@ +false \ No newline at end of file diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.cfe b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.cfe new file mode 100644 index 000000000000..04760ea1597f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.cfs b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.cfs new file mode 100644 index 000000000000..76f1f7888849 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.si b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.si new file mode 100644 index 000000000000..631af4736f16 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_3.si differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.cfe b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.cfe new file mode 100644 index 000000000000..f0090a13c887 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.cfs b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.cfs new file mode 100644 index 000000000000..d6add5c5863b Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.si b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.si new file mode 100644 index 000000000000..6836cad562de Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/_6.si differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/manifest-0.st b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/manifest-0.st new file mode 100644 index 000000000000..030f9cbe09ea Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/manifest-0.st differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/node-0.st b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/node-0.st new file mode 100644 index 000000000000..348784c5d4c7 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/node-0.st differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/segments_9 b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/segments_9 new file mode 100644 index 000000000000..89cf325d9d15 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/segments_9 differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/write.lock b/data-node/src/test/resources/indices/opensearch1/nodes/0/_state/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/_state/retention-leases-0.st b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/_state/retention-leases-0.st new file mode 
100644 index 000000000000..138f762216c0 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/_state/retention-leases-0.st differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/_state/state-0.st b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/_state/state-0.st new file mode 100644 index 000000000000..f13abad2bfae Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/_state/state-0.st differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.cfe b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.cfe new file mode 100644 index 000000000000..586fc8324ced Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.cfs b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.cfs new file mode 100644 index 000000000000..5597e702135d Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.si b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.si new file mode 100644 index 000000000000..0ceccebc5c50 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/_0.si differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/segments_3 b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/segments_3 new file mode 100644 index 000000000000..fc054bbe5112 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/segments_3 differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/write.lock b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/translog/translog-3.tlog new file mode 100644 index 000000000000..d3a5a660e6ca Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/translog/translog.ckp b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/translog/translog.ckp new file mode 100644 index 000000000000..328ea2816ed9 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/0/translog/translog.ckp differ diff --git 
a/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/_state/state-2.st b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/_state/state-2.st new file mode 100644 index 000000000000..03b050f730e6 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch1/nodes/0/indices/ccMlzvWBQT2YkL8LawXC1Q/_state/state-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch1/nodes/0/node.lock b/data-node/src/test/resources/indices/opensearch1/nodes/0/node.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch1/performance_analyzer_enabled.conf b/data-node/src/test/resources/indices/opensearch1/performance_analyzer_enabled.conf new file mode 100644 index 000000000000..02e4a84d62c4 --- /dev/null +++ b/data-node/src/test/resources/indices/opensearch1/performance_analyzer_enabled.conf @@ -0,0 +1 @@ +false \ No newline at end of file diff --git a/data-node/src/test/resources/indices/opensearch1/rca_enabled.conf b/data-node/src/test/resources/indices/opensearch1/rca_enabled.conf new file mode 100644 index 000000000000..02e4a84d62c4 --- /dev/null +++ b/data-node/src/test/resources/indices/opensearch1/rca_enabled.conf @@ -0,0 +1 @@ +false \ No newline at end of file diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdm b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdm new file mode 100644 index 000000000000..4592f1ff272f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdt b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdt new file mode 100644 index 000000000000..fc246a706219 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdt differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdx b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdx new file mode 100644 index 000000000000..ab50a81d8da0 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fdx differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fnm b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fnm new file mode 100644 index 000000000000..704f82b1ce28 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.fnm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.si new file mode 100644 index 000000000000..8bcfb27d0ad9 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.doc b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.doc new file mode 100644 index 000000000000..2319d7c9effe Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.doc differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tim b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tim new file mode 100644 index 000000000000..7c20e412fa97 Binary files /dev/null 
and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tim differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tip b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tip new file mode 100644 index 000000000000..97ab0f3420b7 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tip differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tmd b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tmd new file mode 100644 index 000000000000..4c07b5af2193 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_17_Lucene90_0.tmd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.cfe new file mode 100644 index 000000000000..0ed650a42716 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.cfs new file mode 100644 index 000000000000..2d2bbd5b56f9 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.si new file mode 100644 index 000000000000..b0d307db30f2 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/_1a.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/manifest-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/manifest-1.st new file mode 100644 index 000000000000..030f9cbe09ea Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/manifest-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/node-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/node-1.st new file mode 100644 index 000000000000..ff1d46bb168e Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/node-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/segments_1b b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/segments_1b new file mode 100644 index 000000000000..28812772ec01 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/segments_1b differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/_state/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/_state/retention-leases-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/_state/retention-leases-1.st new file mode 100644 index 000000000000..c22ae3176125 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/_state/retention-leases-1.st differ diff --git 
a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/_state/state-0.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/_state/state-0.st new file mode 100644 index 000000000000..2ea00c44059e Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/_state/state-0.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.cfe new file mode 100644 index 000000000000..8be0b9b7e0f6 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.cfs new file mode 100644 index 000000000000..8f7b8382d2db Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.si new file mode 100644 index 000000000000..d8733240ae8b Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/_0.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/segments_3 b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/segments_3 new file mode 100644 index 000000000000..6d9b7f27169e Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/segments_3 differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/translog/translog-3.tlog new file mode 100644 index 000000000000..59889b55cc8f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/translog/translog.ckp b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/translog/translog.ckp new file mode 100644 index 000000000000..43c040560837 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/_state/state-2.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/_state/state-2.st new file mode 100644 index 000000000000..14628efef14d Binary files 
/dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/7z16oEKPTjivI0qd4tv36Q/_state/state-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/_state/retention-leases-2.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/_state/retention-leases-2.st new file mode 100644 index 000000000000..46b2d7620c0a Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/_state/retention-leases-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/_state/state-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/_state/state-1.st new file mode 100644 index 000000000000..03a16b52067b Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/_state/state-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.cfe new file mode 100644 index 000000000000..45633ea7029d Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.cfs new file mode 100644 index 000000000000..dad4ea99dc4c Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.si new file mode 100644 index 000000000000..298a4f1dbcb6 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/_1.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/segments_3 b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/segments_3 new file mode 100644 index 000000000000..7fa4ac784bf9 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/segments_3 differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/translog/translog-3.tlog new file mode 100644 index 000000000000..073072d3c5bb Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/translog/translog.ckp 
b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/translog/translog.ckp new file mode 100644 index 000000000000..43c040560837 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/_state/state-3.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/_state/state-3.st new file mode 100644 index 000000000000..c68b7603a362 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/ViOS0qZKRb6LKkIos_3GwQ/_state/state-3.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/_state/retention-leases-2.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/_state/retention-leases-2.st new file mode 100644 index 000000000000..d1a844781355 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/_state/retention-leases-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/_state/state-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/_state/state-1.st new file mode 100644 index 000000000000..8ff55c443de9 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/_state/state-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/index/segments_2 b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/index/segments_2 new file mode 100644 index 000000000000..adde766ec88c Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/index/segments_2 differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/index/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/translog/translog-3.tlog new file mode 100644 index 000000000000..0edd225eedb5 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/translog/translog.ckp b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/translog/translog.ckp new file mode 100644 index 000000000000..10fdb5ec2a77 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/_state/state-3.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/_state/state-3.st new file mode 100644 index 000000000000..f535164e10f5 Binary files /dev/null and 
b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/dIdY0mrJQWOU5Xd1z7vLOw/_state/state-3.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/_state/retention-leases-2.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/_state/retention-leases-2.st new file mode 100644 index 000000000000..6efd71d360cf Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/_state/retention-leases-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/_state/state-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/_state/state-1.st new file mode 100644 index 000000000000..4cf08a308046 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/_state/state-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.cfe new file mode 100644 index 000000000000..63804cb63a3f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.cfs new file mode 100644 index 000000000000..2131d2023886 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.si new file mode 100644 index 000000000000..b1742a6cb331 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1.fnm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1.fnm new file mode 100644 index 000000000000..32e1ab2ddbb3 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1.fnm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1_Lucene90_0.dvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1_Lucene90_0.dvd new file mode 100644 index 000000000000..5441f6263efb Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1_Lucene90_0.dvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1_Lucene90_0.dvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1_Lucene90_0.dvm new file mode 100644 index 000000000000..fdbf289be0c6 Binary files /dev/null and 
b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_5_1_Lucene90_0.dvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.cfe new file mode 100644 index 000000000000..1504035c1d15 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.cfs new file mode 100644 index 000000000000..5ff3236d644a Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.si new file mode 100644 index 000000000000..6207cdc907eb Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1.fnm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1.fnm new file mode 100644 index 000000000000..1f043322ba9a Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1.fnm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1_Lucene90_0.dvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1_Lucene90_0.dvd new file mode 100644 index 000000000000..0b1fcf6a1dde Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1_Lucene90_0.dvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1_Lucene90_0.dvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1_Lucene90_0.dvm new file mode 100644 index 000000000000..4682d72cd4c6 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_6_1_Lucene90_0.dvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdm new file mode 100644 index 000000000000..0083bd4b5532 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdt b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdt new file mode 100644 index 000000000000..4d288506de5c Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdt differ diff --git 
a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdx b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdx new file mode 100644 index 000000000000..407d3c1549aa Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fdx differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fnm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fnm new file mode 100644 index 000000000000..268243ae8896 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.fnm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdd new file mode 100644 index 000000000000..f35eeb0a67c2 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdi b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdi new file mode 100644 index 000000000000..3804146751f7 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdi differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdm new file mode 100644 index 000000000000..f56a42731cb4 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.kdm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.nvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.nvd new file mode 100644 index 000000000000..54954dc4f91c Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.nvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.nvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.nvm new file mode 100644 index 000000000000..bd1ba8777b6d Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.nvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.si new file mode 100644 index 000000000000..4e4487ab9435 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1.fnm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1.fnm new file mode 100644 index 
000000000000..d93ef406ab46 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1.fnm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1_Lucene90_0.dvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1_Lucene90_0.dvd new file mode 100644 index 000000000000..49072d72b220 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1_Lucene90_0.dvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1_Lucene90_0.dvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1_Lucene90_0.dvm new file mode 100644 index 000000000000..b3a3a6e9845e Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_1_Lucene90_0.dvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.doc b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.doc new file mode 100644 index 000000000000..48e88defe884 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.doc differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.dvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.dvd new file mode 100644 index 000000000000..35b111a0aa8f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.dvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.dvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.dvm new file mode 100644 index 000000000000..7d2bfaeb25a3 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.dvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.pos b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.pos new file mode 100644 index 000000000000..6d7aa25a7990 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.pos differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tim b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tim new file mode 100644 index 000000000000..b3289fbc326d Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tim differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tip b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tip 
new file mode 100644 index 000000000000..509064df7b8e Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tip differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tmd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tmd new file mode 100644 index 000000000000..0b318927f5cc Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_7_Lucene90_0.tmd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdm new file mode 100644 index 000000000000..be0091fb6f23 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdt b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdt new file mode 100644 index 000000000000..93b45552e084 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdt differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdx b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdx new file mode 100644 index 000000000000..1b93a0c02e88 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fdx differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fnm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fnm new file mode 100644 index 000000000000..2f13b2fa38d8 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.fnm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdd new file mode 100644 index 000000000000..0879904f6efc Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdi b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdi new file mode 100644 index 000000000000..5eb8c388e8f9 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdi differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdm new file mode 100644 index 000000000000..1db9aad8a92c Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.kdm differ diff 
--git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.nvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.nvd new file mode 100644 index 000000000000..e8a9f6bb20ed Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.nvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.nvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.nvm new file mode 100644 index 000000000000..8901dea94926 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.nvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.si new file mode 100644 index 000000000000..fff33c93d9fe Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.doc b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.doc new file mode 100644 index 000000000000..b281f0cce9be Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.doc differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.dvd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.dvd new file mode 100644 index 000000000000..4a5394e188f1 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.dvd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.dvm b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.dvm new file mode 100644 index 000000000000..d175e53474de Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.dvm differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.pos b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.pos new file mode 100644 index 000000000000..226461e06f88 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.pos differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tim b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tim new file mode 100644 index 000000000000..47909ec6a6bb Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tim differ diff --git 
a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tip b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tip new file mode 100644 index 000000000000..4af5b7ed82c0 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tip differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tmd b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tmd new file mode 100644 index 000000000000..38162d961df2 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/_8_Lucene90_0.tmd differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/segments_4 b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/segments_4 new file mode 100644 index 000000000000..00d83cc57b03 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/segments_4 differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/translog/translog-4.tlog b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/translog/translog-4.tlog new file mode 100644 index 000000000000..193761e18bcc Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/translog/translog-4.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/translog/translog.ckp b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/translog/translog.ckp new file mode 100644 index 000000000000..f3e56692d236 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/_state/state-5.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/_state/state-5.st new file mode 100644 index 000000000000..8ca626cfb8e0 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/lmXxSRU5RkGiY4rPrDBAjQ/_state/state-5.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/_state/retention-leases-2.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/_state/retention-leases-2.st new file mode 100644 index 000000000000..c511d21a864b Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/_state/retention-leases-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/_state/state-1.st 
b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/_state/state-1.st new file mode 100644 index 000000000000..4cd74c9f6b2a Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/_state/state-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.cfe new file mode 100644 index 000000000000..216c546f2a74 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.cfs new file mode 100644 index 000000000000..16f430a05e11 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.si new file mode 100644 index 000000000000..8c18aeaf590f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/_a.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/segments_3 b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/segments_3 new file mode 100644 index 000000000000..d67df14d03e5 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/segments_3 differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/translog/translog-3.tlog new file mode 100644 index 000000000000..5120b9d09fca Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/translog/translog.ckp b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/translog/translog.ckp new file mode 100644 index 000000000000..cbdf6ab74b44 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/_state/state-13.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/_state/state-13.st new file mode 100644 index 000000000000..e46610176572 Binary files /dev/null and 
b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/y4NCxztMRCyCP-pxj9xUGQ/_state/state-13.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/_state/retention-leases-2.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/_state/retention-leases-2.st new file mode 100644 index 000000000000..de20f9671660 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/_state/retention-leases-2.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/_state/state-1.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/_state/state-1.st new file mode 100644 index 000000000000..ae20c4217a96 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/_state/state-1.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.cfe b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.cfe new file mode 100644 index 000000000000..cd9bbc8157fa Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.cfe differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.cfs b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.cfs new file mode 100644 index 000000000000..b310c4525601 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.cfs differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.si b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.si new file mode 100644 index 000000000000..381ec8e4e9d5 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/_6.si differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/segments_3 b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/segments_3 new file mode 100644 index 000000000000..09da586ad6b6 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/segments_3 differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/write.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/index/write.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/translog/translog-3.tlog b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/translog/translog-3.tlog new file mode 100644 index 000000000000..c0bd740bb8d7 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/translog/translog-3.tlog differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/translog/translog.ckp 
b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/translog/translog.ckp new file mode 100644 index 000000000000..102502da083f Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/0/translog/translog.ckp differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/_state/state-4.st b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/_state/state-4.st new file mode 100644 index 000000000000..b344ac7f0f67 Binary files /dev/null and b/data-node/src/test/resources/indices/opensearch2/nodes/0/indices/z4XbK09XTuq5AQXoeeU-Jg/_state/state-4.st differ diff --git a/data-node/src/test/resources/indices/opensearch2/nodes/0/node.lock b/data-node/src/test/resources/indices/opensearch2/nodes/0/node.lock new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/data-node/src/test/resources/opensearch.properties b/data-node/src/test/resources/opensearch.properties new file mode 100644 index 000000000000..40be8ceb8603 --- /dev/null +++ b/data-node/src/test/resources/opensearch.properties @@ -0,0 +1,2 @@ +# Purpose of this file is to make properties defined in pom.xml available in the tests +opensearchVersion=${opensearch.version} diff --git a/data-node/src/test/resources/org/graylog/datanode/process/test-script.sh b/data-node/src/test/resources/org/graylog/datanode/process/test-script.sh new file mode 100755 index 000000000000..5d35cbe200c1 --- /dev/null +++ b/data-node/src/test/resources/org/graylog/datanode/process/test-script.sh @@ -0,0 +1,19 @@ +#!/bin/sh +# First output some messages to stdout and stderr +echo Hello World +echo This message goes to stderr >&2 +echo second line +echo third line + + +if [ $# -eq 0 ] + # no argument, keep spinning forever + then + while true; do + sleep 20 + done + else + # we have an argument, use it as an exit code + exit $1 +fi + diff --git a/distribution/pom.xml b/distribution/pom.xml new file mode 100644 index 000000000000..695633731190 --- /dev/null +++ b/distribution/pom.xml @@ -0,0 +1,86 @@ + + + + + graylog-parent + org.graylog + 6.2.0-SNAPSHOT + + 4.0.0 + + distribution + Graylog Binary Distribution Tarball + Module solely performing final assembly step after all other modules artifacts have been built + + + + true + true + true + true + true + yyyyMMddHHmmss + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + generate-server-artifact + package + + single + + + + + true + true + + src/main/assembly/graylog.xml + + + ${project.basedir}/../target/assembly + graylog-${project.version}-${maven.build.timestamp} + + + + + + + org.graylog + graylog-storage-elasticsearch7 + ${project.version} + + + org.graylog + graylog-storage-opensearch2 + ${project.version} + + + diff --git a/distribution/src/main/assembly/graylog.xml b/distribution/src/main/assembly/graylog.xml new file mode 100644 index 000000000000..3a6e57ff5f24 --- /dev/null +++ b/distribution/src/main/assembly/graylog.xml @@ -0,0 +1,70 @@ + + + graylog-server-tarball + + tar.gz + + false + + + ${project.basedir}/.. + + README.markdown + LICENSE + UPGRADING.rst + + + + ${project.basedir}/../graylog2-server/target/sbom + + graylog-server-*-sbom.json + graylog-server-*-sbom.xml + + sbom + + + + + ./ + log + + */** + + + + ./ + plugin + + */** + + + + + + ${project.basedir}/../graylog2-server/target/graylog2-server-${project.version}-shaded.jar + graylog.jar + . 
+ + + ${project.basedir}/../bin/graylogctl + bin/ + 0755 + + + ${project.basedir}/../misc/graylog.conf + graylog.conf.example + . + + + ${project.basedir}/../graylog-storage-elasticsearch7/target/graylog-storage-elasticsearch7-${project.version}.jar + plugin/ + + + ${project.basedir}/../graylog-storage-opensearch2/target/graylog-storage-opensearch2-${project.version}.jar + plugin/ + + + diff --git a/docs/cef/README.md b/docs/cef/README.md index 21c78c0b4834..82f85dac1aea 100644 --- a/docs/cef/README.md +++ b/docs/cef/README.md @@ -1,7 +1,5 @@ # Graylog CEF message input -[![Build Status](https://travis-ci.org/Graylog2/graylog-plugin-cef.svg?branch=master)](https://travis-ci.org/Graylog2/graylog-plugin-cef) - Graylog input plugin to receive CEF logs via UDP or TCP. Install the plugin and launch a new CEF input from `System -> Inputs` in your Graylog Web Interface. This plugin is strictly following the CEF standard and will probably not work with non-compliant messages. Please open an issue in this repository in case of any problems. @@ -28,7 +26,7 @@ Some systems will send CEF as part of a RFC compliant syslog message. In this ca ### Parsing raw CEF or CEF embedded in any other envelopes -If the envelope is not syslog or the CEF message is not in an envelope at all, you can use the [Graylog Processing Pipelines](http://docs.graylog.org/en/latest/pages/pipelines.html) and the `parse_cef` function this plugin provides: +If the envelope is not syslog or the CEF message is not in an envelope at all, you can use the [Graylog Processing Pipelines](https://docs.graylog.org/docs/processing-pipelines) and the `parse_cef` function this plugin provides: 1. Use a pipeline rule to parse out the CEF part of the message (for example, using regex) and then apply the `parse_cef()` function on that extracted string. 1. If desired, use a second pipeline step to rename the `cef_` prefixed message fields to something easier to use and easier to remember. @@ -53,4 +51,4 @@ $ mvn release:prepare $ mvn release:perform ``` -This sets the version numbers, creates a tag and pushes to GitHub. Travis CI will build the release artifacts and upload to GitHub automatically. +This sets the version numbers, creates a tag and pushes to GitHub. diff --git a/docs/netflow/README.md b/docs/netflow/README.md index c1464f0819e7..0a4e2488559a 100644 --- a/docs/netflow/README.md +++ b/docs/netflow/README.md @@ -1,8 +1,6 @@ NetFlow Plugin for Graylog ========================== -[![Build Status](https://travis-ci.org/Graylog2/graylog-plugin-netflow.svg?branch=master)](https://travis-ci.org/Graylog2/graylog-plugin-netflow) - This plugin provides a NetFlow UDP input to act as a Flow collector that receives data from Flow exporters. Each received Flow will be converted to a Graylog message. **Required Graylog version:** 2.3.0 and later @@ -83,4 +81,4 @@ $ mvn release:prepare $ mvn release:perform ``` -This sets the version numbers, creates a tag and pushes to GitHub. Travis CI will build the release artifacts and upload to GitHub automatically. +This sets the version numbers, creates a tag and pushes to GitHub. 
diff --git a/full-backend-tests/pom.xml b/full-backend-tests/pom.xml new file mode 100644 index 000000000000..27fdfd37876a --- /dev/null +++ b/full-backend-tests/pom.xml @@ -0,0 +1,235 @@ + + + + 4.0.0 + + org.graylog + graylog-project-parent + 6.2.0-SNAPSHOT + ../graylog-project-parent + + + full-backend-tests + jar + + full-backend-tests + Graylog REST API integration tests + + + true + true + true + true + + + + + + org.graylog2 + graylog2-server + ${project.version} + + + + org.graylog2 + graylog2-server + ${project.version} + test-jar + test + + + + org.graylog + graylog-storage-elasticsearch7 + ${project.version} + + + org.graylog + graylog-storage-elasticsearch7 + ${project.version} + test-jar + test + + + + org.graylog + graylog-storage-opensearch2 + ${project.version} + + + org.graylog + graylog-storage-opensearch2 + ${project.version} + test-jar + test + + + + + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-slf4j2-impl + + + org.slf4j + jcl-over-slf4j + + + org.slf4j + log4j-over-slf4j + + + org.slf4j + slf4j-api + + + + com.google.guava + guava + + + io.rest-assured + rest-assured + test + + + io.rest-assured + json-path + test + + + org.assertj + assertj-joda-time + + + joda-time + joda-time + + + com.fasterxml.jackson.core + jackson-core + test + + + com.fasterxml.jackson.core + jackson-databind + test + + + commons-io + commons-io + test + + + + + org.junit.jupiter + junit-jupiter + test + + + org.mockito + mockito-core + + + org.mockito + mockito-junit-jupiter + test + + + org.assertj + assertj-core + + + org.hamcrest + hamcrest + test + + + org.testcontainers + testcontainers + test + + + org.testcontainers + elasticsearch + test + + + org.opensearch + opensearch-testcontainers + ${testcontainers.opensearch.version} + test + + + + + + + com.mycila + license-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + + + + + datanode + + + !skip.datanode + + + + + org.graylog2 + data-node + ${project.version} + + + org.graylog2 + data-node + ${project.version} + test-jar + test + + + + + diff --git a/full-backend-tests/src/test/java/org/graylog/events/processor/aggregation/EventNotificationsResourceIT.java b/full-backend-tests/src/test/java/org/graylog/events/processor/aggregation/EventNotificationsResourceIT.java new file mode 100644 index 000000000000..a5df9b9f7602 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/events/processor/aggregation/EventNotificationsResourceIT.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.events.processor.aggregation; + +import com.github.rholder.retry.RetryException; +import org.assertj.core.api.Assertions; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.WebhookRequest; +import org.graylog.testing.completebackend.WebhookServerInstance; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeEach; + +import java.util.List; +import java.util.concurrent.ExecutionException; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = {SearchServer.DATANODE_DEV}, withWebhookServerEnabled = true) +public class EventNotificationsResourceIT { + private final GraylogApis graylogApis; + private final WebhookServerInstance webhookTester; + + public EventNotificationsResourceIT(GraylogApis graylogApis) { + this.graylogApis = graylogApis; + webhookTester = graylogApis.backend().getWebhookServerInstance().orElseThrow(() -> new IllegalStateException("Webhook tester instance not found!")); + } + + @BeforeEach + void setUp() { + graylogApis.system().urlWhitelist(webhookTester.getContainerizedCollectorURI()); + } + + @ContainerMatrixTest + void testNotificationTestTrigger() throws ExecutionException, RetryException { + + final String httpNotificationID = graylogApis.eventsNotifications().createHttpNotification(webhookTester.getContainerizedCollectorURI()); + + // now trigger the test of the notification, we should immediately see one recorded webhook afterward + graylogApis.post("/events/notifications/" + httpNotificationID + "/test", "", 200); + + // wait for the just triggered notification + webhookTester.waitForRequests(webhookRequest -> webhookRequest.body().contains("TEST_NOTIFICATION_ID")); + + // the wait succeeded, cleanup + graylogApis.eventsNotifications().deleteNotification(httpNotificationID); + + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/events/processor/aggregation/PivotAggregationSearchIT.java b/full-backend-tests/src/test/java/org/graylog/events/processor/aggregation/PivotAggregationSearchIT.java new file mode 100644 index 000000000000..696b576866a3 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/events/processor/aggregation/PivotAggregationSearchIT.java @@ -0,0 +1,255 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.events.processor.aggregation; + +import com.github.rholder.retry.RetryException; +import org.assertj.core.api.Assertions; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.WebhookRequest; +import org.graylog.testing.completebackend.WebhookServerInstance; +import org.graylog.testing.completebackend.apis.DefaultStreamMatches; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Streams; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.plugin.streams.StreamRuleType; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.concurrent.ExecutionException; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = {SearchServer.ES7, SearchServer.OS2_LATEST, SearchServer.DATANODE_DEV}, withWebhookServerEnabled = true) +public class PivotAggregationSearchIT { + private static final Logger LOG = LoggerFactory.getLogger(PivotAggregationSearchIT.class); + private static final String indexSetPrefix = "pivot-search-test"; + + private final GraylogApis apis; + private final WebhookServerInstance webhookTester; + private String indexSetId; + private String streamId; + private String isolatedStreamId; + + public PivotAggregationSearchIT(GraylogApis apis) { + this.apis = apis; + this.webhookTester = apis.backend().getWebhookServerInstance().orElseThrow(() -> new IllegalStateException("Webhook tester not found!")); + } + + @BeforeEach + void setUp() { + this.indexSetId = apis.indices().createIndexSet("Pivot Aggregation Search Test", "", indexSetPrefix); + apis.indices().waitFor(() -> apis.backend().searchServerInstance().client().indicesExists(indexSetPrefix + "_0", indexSetPrefix + "_deflector"), + "Timed out waiting for index/deflector to be created."); + this.streamId = apis.streams().createStream( + "Stream for Pivot Aggregation Search Test", + this.indexSetId, + true, + DefaultStreamMatches.REMOVE, + new Streams.StreamRule(StreamRuleType.EXACT.toInteger(), "example.org", "source", false) + ); + } + + @AfterEach + void tearDown() { + apis.streams().deleteStream(this.streamId); + if (this.isolatedStreamId != null) { + apis.streams().deleteStream(this.isolatedStreamId); + this.isolatedStreamId = null; + } + apis.indices().deleteIndexSet(this.indexSetId, true); + apis.indices().waitFor(() -> !apis.backend().searchServerInstance().client().indicesExists(indexSetPrefix + "_0") + && !apis.backend().searchServerInstance().client().indicesExists(indexSetPrefix + "_deflector"), + "Timed out waiting for index/deflector to be deleted."); + } + + @ContainerMatrixTest + void testPivotAggregationSearchAllKnownFields() throws ExecutionException, RetryException { + apis.system().urlWhitelist(webhookTester.getContainerizedCollectorURI()); + + final String notificationID = apis.eventsNotifications().createHttpNotification(webhookTester.getContainerizedCollectorURI()); + + final String eventDefinitionID = apis.eventDefinitions().createEventDefinition(notificationID, List.of( + "http_response_code", + "type" + )); + + postMessages(); + + waitForWebHook(eventDefinitionID, "my alert def: 200|ssh - count()=3.0"); + + 
apis.eventsNotifications().deleteNotification(notificationID); + apis.eventDefinitions().deleteDefinition(eventDefinitionID); + } + + @ContainerMatrixTest + void testPivotAggregationSearchOneUnknownField() throws ExecutionException, RetryException { + apis.system().urlWhitelist(webhookTester.getContainerizedCollectorURI()); + + final String notificationID = apis.eventsNotifications().createHttpNotification(webhookTester.getContainerizedCollectorURI()); + + final String eventDefinitionID = apis.eventDefinitions().createEventDefinition(notificationID, List.of( + "http_response_code", + "unknown_field", + "type" + )); + + postMessages(); + + waitForWebHook(eventDefinitionID, "my alert def: 200|(Empty Value)|ssh - count()=3.0"); + + apis.eventsNotifications().deleteNotification(notificationID); + apis.eventDefinitions().deleteDefinition(eventDefinitionID); + } + + @ContainerMatrixTest + void testPivotAggregationSearchAllUnknownFields() throws ExecutionException, RetryException { + apis.system().urlWhitelist(webhookTester.getContainerizedCollectorURI()); + + final String notificationID = apis.eventsNotifications().createHttpNotification(webhookTester.getContainerizedCollectorURI()); + + final String eventDefinitionID = apis.eventDefinitions().createEventDefinition(notificationID, List.of( + "unknown_field_1", + "unknown_field_2", + "unknown_field_3" + )); + + postMessages(); + + waitForWebHook(eventDefinitionID, "my alert def: (Empty Value)|(Empty Value)|(Empty Value) - count()=3.0"); + + apis.eventsNotifications().deleteNotification(notificationID); + apis.eventDefinitions().deleteDefinition(eventDefinitionID); + } + + @ContainerMatrixTest + void testPivotAggregationIsolatedToStream() throws ExecutionException, RetryException { + apis.system().urlWhitelist(webhookTester.getContainerizedCollectorURI()); + + final String notificationID = apis.eventsNotifications().createHttpNotification(webhookTester.getContainerizedCollectorURI()); + + this.isolatedStreamId = apis.streams().createStream( + "Stream for testing event definition isolation", + this.indexSetId, + true, + DefaultStreamMatches.REMOVE, + new Streams.StreamRule(StreamRuleType.EXACT.toInteger(), "stream_isolation_test", "facility", false) + ); + + final String eventDefinitionID = apis.eventDefinitions().createEventDefinition(notificationID, List.of(), List.of(isolatedStreamId)); + + postMessagesToOtherStream(); + postMessages(); + + waitForWebHook(eventDefinitionID, "my alert def: count()=1.0"); + + apis.eventsNotifications().deleteNotification(notificationID); + apis.eventDefinitions().deleteDefinition(eventDefinitionID); + } + + @ContainerMatrixTest + void testPivotAggregationWithGroupingIsIsolatedToStream() throws ExecutionException, RetryException { + apis.system().urlWhitelist(webhookTester.getContainerizedCollectorURI()); + + final String notificationID = apis.eventsNotifications().createHttpNotification(webhookTester.getContainerizedCollectorURI()); + + this.isolatedStreamId = apis.streams().createStream( + "Stream for testing event definition isolation", + this.indexSetId, + true, + DefaultStreamMatches.REMOVE, + new Streams.StreamRule(StreamRuleType.EXACT.toInteger(), "stream_isolation_test", "facility", false) + ); + + final String eventDefinitionID = apis.eventDefinitions().createEventDefinition(notificationID, List.of("http_response_code"), List.of(isolatedStreamId)); + + postMessagesToOtherStream(); + postMessages(); + + waitForWebHook(eventDefinitionID, "my alert def: 500 - count()=1.0"); + + 
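+        // count()=1.0 confirms only the message routed into the isolated stream was aggregated; clean up notification and event definition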
apis.eventsNotifications().deleteNotification(notificationID); + apis.eventDefinitions().deleteDefinition(eventDefinitionID); + } + + private void waitForWebHook(String eventDefinitionID, String eventMessage) throws ExecutionException, RetryException { + try { + final List requests = webhookTester.waitForRequests((req) -> req.bodyAsJsonPath().read("event_definition_id").equals(eventDefinitionID)); + Assertions.assertThat(requests) + .isNotEmpty() + .allSatisfy(req -> { + final String message = req.bodyAsJsonPath().read("event.message"); + Assertions.assertThat(message).isEqualTo(eventMessage); + }); + + } catch (ExecutionException | RetryException e) { + LOG.error(this.apis.backend().getLogs()); + throw e; + } + } + + private void postMessages() { + apis.gelf().createGelfHttpInput() + .postMessage(""" + { + "short_message":"pivot-aggregation-search-test-1", + "host":"example.org", + "type":"ssh", + "source":"example.org", + "http_response_code":200, + "resource": "posts" + }""") + .postMessage(""" + { + "short_message":"pivot-aggregation-search-test-2", + "host":"example.org", + "type":"ssh", + "source":"example.org", + "http_response_code":200, + "resource": "posts" + }""") + .postMessage(""" + { + "short_message":"pivot-aggregation-search-test-3", + "host":"example.org", + "type":"ssh", + "source":"example.org", + "http_response_code":200, + "resource": "posts" + }"""); + apis.search().waitForMessagesCount(3); + apis.backend().searchServerInstance().client().refreshNode(); + } + + private void postMessagesToOtherStream() { + apis.gelf().createGelfHttpInput() + .postMessage(""" + { + "short_message":"pivot-aggregation-search-test-1", + "host":"example.org", + "type":"ssh", + "source":"example.org", + "http_response_code":500, + "resource": "posts", + "facility": "stream_isolation_test" + }"""); + apis.search().waitForMessagesCount(1); + apis.backend().searchServerInstance().client().refreshNode(); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/exceptionmappers/JsonParsingErrorsIT.java b/full-backend-tests/src/test/java/org/graylog/exceptionmappers/JsonParsingErrorsIT.java new file mode 100644 index 000000000000..c4befbc5a311 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/exceptionmappers/JsonParsingErrorsIT.java @@ -0,0 +1,166 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.exceptionmappers; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = SearchServer.OS2_LATEST) +public class JsonParsingErrorsIT { + private static final String SYNC_SEARCH = "/views/search/sync"; + private static final String STREAMS = "/streams"; + + private final GraylogApis api; + + public JsonParsingErrorsIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void returnsSpecificErrorWhenTypeMismatches() { + assertErrorResponse(SYNC_SEARCH, """ + { + "queries": [ + { + "id": "75988e96-71e2-4f3f-9d14-d8e918571b16", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": "foo" + } + } + ] + } + """) + .body("path", equalTo("queries.[0].timerange.from")) + .body("line", equalTo(11)) + .body("column", equalTo(14)) + .body("message", equalTo("Error at \"queries.[0].timerange.from\" [11, 14]: Must be of type int")) + .body("reference_path", equalTo( + "org.graylog.plugins.views.search.rest.AutoValue_SearchDTO$Builder[\"queries\"]" + + "->java.util.LinkedHashSet[0]->org.graylog.plugins.views.search.rest.AutoValue_QueryDTO$Builder[\"timerange\"]" + + "->org.graylog2.plugin.indexer.searches.timeranges.AutoValue_RelativeRange$Builder[\"from\"]")); + + } + + @ContainerMatrixTest + void returnsSpecificErrorForJsonParsingError() { + assertErrorResponse(SYNC_SEARCH, """ + { + "queries": [ + { + "id": "75988e96-71e2-4f3f-9d14-d8e918571b16", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 23, + } + } + ] + } + """) + .body("path", equalTo("queries.[0].timerange")) + .body("line", equalTo(12)) + .body("column", equalTo(5)) + .body("message", containsString("Unexpected character ('}' (code 125)): was expecting double-quote to start field name")) + .body("reference_path", equalTo( + "org.graylog.plugins.views.search.rest.AutoValue_SearchDTO$Builder[\"queries\"]" + + "->java.util.LinkedHashSet[0]" + + "->org.graylog.plugins.views.search.rest.AutoValue_QueryDTO$Builder[\"timerange\"]")); + + } + + @ContainerMatrixTest + void extractsReferencePathFromMissingProperty() { + assertErrorResponse(STREAMS, "{}") + .body("reference_path", equalTo("org.graylog2.rest.resources.streams.requests.CreateStreamRequest")); + + assertErrorResponse(STREAMS, """ + { + "title": "Foo", + "rules": [{}] + } + """) + .body("reference_path", equalTo( + "org.graylog2.rest.resources.streams.requests.CreateStreamRequest[\"rules\"]" + + "->java.util.ArrayList[0]" + + "->org.graylog2.rest.resources.streams.rules.requests.CreateStreamRuleRequest")); + } + + @ContainerMatrixTest + void handlesGenericJSONErrorsOnRootLevel() { + assertErrorResponse(STREAMS, """ + { + "title": "Foo", + } + """) + .body("message", equalTo("Unexpected character ('}' (code 125)): was expecting double-quote to start field name")) + .body("line", equalTo(3)) + .body("column", equalTo(1)); + + assertErrorResponse(STREAMS, 
""" + { + "title":: "Foo" + } + """) + .body("message", equalTo("Unexpected character (':' (code 58)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false')")) + .body("line", equalTo(2)) + .body("column", equalTo(13)); + } + + @ContainerMatrixTest + void handleInvalidPropertiesOnRootLevel() { + assertErrorResponse(SYNC_SEARCH, """ + { + "foo": 23 + } + """) + .body("message", equalTo("Unable to map property foo.\nKnown properties include: parameters, id, queries, skip_no_streams_check")) + .body("line", equalTo(2)) + .body("column", equalTo(14)) + .body("path", equalTo("foo")) + .body("reference_path", equalTo("org.graylog.plugins.views.search.rest.AutoValue_SearchDTO$Builder[\"foo\"]")); + } + + private ValidatableResponse assertErrorResponse(String url, String body) { + return given() + .spec(api.requestSpecification()) + .log().ifValidationFails() + .when() + .body(body) + .post(url) + .then() + .log().ifValidationFails() + .assertThat() + .statusCode(400) + .body("type", equalTo("RequestError")); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/pipelines/StreamsPipelinesIT.java b/full-backend-tests/src/test/java/org/graylog/pipelines/StreamsPipelinesIT.java new file mode 100644 index 000000000000..4d6050db8197 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/pipelines/StreamsPipelinesIT.java @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.pipelines; + +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog2.plugin.streams.Stream.DEFAULT_EVENTS_STREAM_ID; +import static org.graylog2.plugin.streams.Stream.DEFAULT_STREAM_ID; +import static org.graylog2.plugin.streams.Stream.DEFAULT_SYSTEM_EVENTS_STREAM_ID; + +@ContainerMatrixTestsConfiguration +public class StreamsPipelinesIT { + private static final String pipeline1Title = "Test Pipeline 1"; + private static final String pipeline2Title = "Test Pipeline 2"; + private final GraylogApis api; + + private String indexSetId; + private String stream1Id; + private String stream2Id; + private String stream3Id; + private String pipeline1Id; + private String pipeline2Id; + + public StreamsPipelinesIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + void beforeAll() { + this.indexSetId = api.indices().createIndexSet("Test Indices", "Some test indices", "streamstest"); + this.stream1Id = api.streams().createStream("New Stream 1", this.indexSetId); + this.stream2Id = api.streams().createStream("New Stream 2", this.indexSetId); + this.stream3Id = api.streams().createStream("New Stream 3", this.indexSetId); + this.pipeline1Id = api.pipelines().create(pipeline1Title, Set.of(stream1Id, stream2Id)); + this.pipeline2Id = api.pipelines().create(pipeline2Title, Set.of(stream1Id, stream3Id)); + } + + @AfterAll + void afterAll() { + api.pipelines().delete(pipeline1Id); + api.pipelines().delete(pipeline2Id); + api.streams().deleteStream(this.stream1Id); + api.streams().deleteStream(this.stream2Id); + api.streams().deleteStream(this.stream3Id); + api.indices().deleteIndexSet(this.indexSetId, true); + } + + private record BulkPipelinesRequest(Collection streamIds) {} + + @ContainerMatrixTest + void bulkRetrievalOfPipelineConnections() throws Exception { + final var result = api.post("/streams/pipelines", new BulkPipelinesRequest(Set.of(stream1Id, stream2Id, stream3Id)), 200) + .extract().body().jsonPath(); + final var pipeline1 = pipelineSummary(pipeline1Id, pipeline1Title); + final var pipeline2 = pipelineSummary(pipeline2Id, pipeline2Title); + + assertThat(result.getList(stream1Id)).containsExactlyInAnyOrder(pipeline1, pipeline2); + assertThat(result.getList(stream2Id)).containsExactlyInAnyOrder(pipeline1); + assertThat(result.getList(stream3Id)).containsExactlyInAnyOrder(pipeline2); + } + + @ContainerMatrixTest + void bulkRetrievalOfPipelineConnectionsForBuiltinStreams() throws Exception { + final var result = api.post("/streams/pipelines", new BulkPipelinesRequest(Set.of(DEFAULT_STREAM_ID, DEFAULT_EVENTS_STREAM_ID, DEFAULT_SYSTEM_EVENTS_STREAM_ID)), 200) + .extract().body().jsonPath(); + + assertThat(result.getList(DEFAULT_STREAM_ID)).isEmpty(); + assertThat(result.getList(DEFAULT_EVENTS_STREAM_ID)).isEmpty(); + assertThat(result.getList(DEFAULT_SYSTEM_EVENTS_STREAM_ID)).isEmpty(); + } + + @ContainerMatrixTest + void bulkRetrievalOfPipelineConnectionsForDanglingReferences() throws Exception { + final var defaultIndexSet = api.indices().defaultIndexSetId(); + final var streamId = api.streams().createStream("Stream with dangling pipeline reference", 
defaultIndexSet); + final var pipelineId = api.pipelines().create("A pipeline which is about to get deleted", Set.of(streamId)); + api.pipelines().delete(pipelineId); + + final var result = api.post("/streams/pipelines", new BulkPipelinesRequest(Set.of(streamId)), 200) + .extract().body().jsonPath(); + + assertThat(result.getList(streamId)).isEmpty(); + } + + @ContainerMatrixTest + void retrievePipelineConnectionsForASingleStream() { + var result = api.get("/streams/" + stream1Id + "/pipelines", 200) + .extract().body().jsonPath(); + + final var pipeline1 = pipelineSummary(pipeline1Id, pipeline1Title); + final var pipeline2 = pipelineSummary(pipeline2Id, pipeline2Title); + + assertThat(result.getList("")).containsExactlyInAnyOrder(pipeline1, pipeline2); + } + + private Map pipelineSummary(String id, String title) { + return Map.of("id", id, "title", title); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/FavoritesIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/FavoritesIT.java new file mode 100644 index 000000000000..866389ef283d --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/FavoritesIT.java @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Users; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.util.Collections; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration +public class FavoritesIT { + private static final String USERNAME = "john.doe1"; + private static final String PASSWORD = "asdfgh"; + private final GraylogApis api; + + public FavoritesIT(GraylogApis apis) { + this.api = apis; + } + + @BeforeAll + public void init() { + api.users().createUser(new Users.User( + USERNAME, + PASSWORD, + "John", + "Doe", + "john.doe1@example.com", + false, + 30_000, + "Europe/Vienna", + Collections.emptyList(), + Collections.emptyList() + )); + } + + @ContainerMatrixTest + void testCreateDeleteFavorite() { + final String defaultIndexSetId = api.indices().defaultIndexSetId(); + final String temporaryStream = api.streams().createStream("Temporary", defaultIndexSetId); + var grn = "grn::::stream:" + temporaryStream; + + given() + .spec(api.requestSpecification()) + .auth().basic(USERNAME, PASSWORD) + .when() + .put("/favorites/" + grn) + .then() + .log().ifStatusCodeMatches(not(204)) + .statusCode(204); + + var validatableResponse = getFavourites(); + validatableResponse.assertThat().body("favorites[0].grn", equalTo(grn)); + + given() + .spec(api.requestSpecification()) + .auth().basic(USERNAME, PASSWORD) + .when() + .delete("/favorites/" + grn) + .then() + .log().ifStatusCodeMatches(not(204)) + .statusCode(204); + + validatableResponse = getFavourites(); + validatableResponse.assertThat().body("favorites", empty()); + } + + private ValidatableResponse getFavourites() { + return given() + .spec(api.requestSpecification()) + .auth().basic(USERNAME, PASSWORD) + .when() + .get("/favorites") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + } + + +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/MessagesResourceIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/MessagesResourceIT.java new file mode 100644 index 000000000000..5badd4b63ab2 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/MessagesResourceIT.java @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views; + +import io.restassured.response.Response; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.BeforeAll; + +import java.util.Arrays; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; + +@ContainerMatrixTestsConfiguration(searchVersions = SearchServer.OS2) +public class MessagesResourceIT { + private final GraylogApis api; + + public MessagesResourceIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void importMessages() { + this.api.backend().importElasticsearchFixture("messages-for-export.json", MessagesResourceIT.class); + } + + @ContainerMatrixTest + void testInvalidQuery() { + String allMessagesTimeRange = "{\"query_string\":\"foo:\", \"timerange\": {\"type\": \"absolute\", \"from\": \"2015-01-01T00:00:00\", \"to\": \"2015-01-01T23:59:59\"}}"; + given() + .spec(api.requestSpecification()) + .accept("text/csv") + .body(allMessagesTimeRange) + .post("/views/search/messages") + .then() + .statusCode(400).contentType("application/json") + .assertThat().body("message", Matchers.startsWith("Request validation failed")); + } + + @ContainerMatrixTest + void testInvalidQueryResponse() { + this.api.backend().importElasticsearchFixture("messages-for-export.json", MessagesResourceIT.class); + + String allMessagesTimeRange = "{\"timerange\": {\"type\": \"absolute\", \"from\": \"2015-01-01T00:00:00\", \"to\": \"2015-01-01T23:59:59\"}}"; + + Response r = given() + .spec(api.requestSpecification()) + .accept("text/csv") + .body(allMessagesTimeRange) + .expect().response().statusCode(200).contentType("text/csv") + .when() + .post("/views/search/messages"); + + String[] resultLines = r.asString().split("\n"); + + assertThat(resultLines) + .startsWith("\"timestamp\",\"source\",\"message\"") + .as("should contain header"); + + assertThat(Arrays.copyOfRange(resultLines, 1, 5)).containsExactlyInAnyOrder( + "\"2015-01-01T04:00:00.000Z\",\"source-2\",\"Ho\"", + "\"2015-01-01T03:00:00.000Z\",\"source-1\",\"Hi\"", + "\"2015-01-01T02:00:00.000Z\",\"source-2\",\"He\"", + "\"2015-01-01T01:00:00.000Z\",\"source-1\",\"Ha\"" + ); + } + + /** + * Tests, if setting a time zone on the request results in a response containing results in the timezone + */ + @ContainerMatrixTest + void testTimeZone() { + this.api.backend().importElasticsearchFixture("messages-for-export.json", MessagesResourceIT.class); + + String allMessagesTimeRange = """ + {"timerange": { + "type": "absolute", + "from": "2015-01-01T00:00:00", + "to": "2015-01-01T23:59:59" + }, + "time_zone": "Antarctica/Casey" + } + """; + + Response r = given() + .spec(api.requestSpecification()) + .accept("text/csv") + .body(allMessagesTimeRange) + .post("/views/search/messages"); + + String[] resultLines = r.asString().split("\n"); + + assertThat(resultLines) + .startsWith("\"timestamp\",\"source\",\"message\"") + .as("should contain header"); + + assertThat(Arrays.copyOfRange(resultLines, 1, 5)).containsExactlyInAnyOrder( + "\"2015-01-01T09:00:00.000+08:00\",\"source-1\",\"Ha\"", + "\"2015-01-01T10:00:00.000+08:00\",\"source-2\",\"He\"", + "\"2015-01-01T11:00:00.000+08:00\",\"source-1\",\"Hi\"", + "\"2015-01-01T12:00:00.000+08:00\",\"source-2\",\"Ho\"" + ); 
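+        // the +08:00 offsets correspond to the requested time zone (Antarctica/Casey) at the fixture timestamps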
+ } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/QueryValidationResourceIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/QueryValidationResourceIT.java new file mode 100644 index 000000000000..86a3f0c1d3bd --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/QueryValidationResourceIT.java @@ -0,0 +1,217 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import static io.restassured.RestAssured.given; +import static org.graylog.testing.completebackend.Lifecycle.CLASS; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration(serverLifecycle = CLASS, searchVersions = SearchServer.OS1) +public class QueryValidationResourceIT { + + private final GraylogApis api; + + public QueryValidationResourceIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void importMessage() { + api.gelf() + .createGelfHttpInput() + .postMessage( + """ + { + "short_message":"query-validation-test", + "host":"example.org", + "type":"ssh", + "source":"example.org", + "http_response_code":200, + "bytes":42, + "timestamp": "2019-07-23 09:53:08.175", + "otherDate": "2020-07-29T12:00:00.000-05:00", + "resource": "posts", + "always_find_me": "whatever", + "level":3 + }"""); + + api.search().waitForMessage("query-validation-test"); + api.fieldTypes().waitForFieldTypeDefinitions("level"); + } + + + @ContainerMatrixTest + void testMinimalisticRequest() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\":\"foo:bar\"}") + .post("/search/validate") + .then() + .statusCode(200); + validatableResponse.assertThat().body("status", equalTo("WARNING")); + } + + @ContainerMatrixTest + void testInvalidQuery() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\":\"foo:\"}") + .post("/search/validate") + .then() + .statusCode(200); + validatableResponse.assertThat().body("status", equalTo("ERROR")); + validatableResponse.assertThat().body("explanations.error_message[0]", containsString("Cannot parse query, cause: incomplete query, query ended unexpectedly")); + } + + @ContainerMatrixTest + void testOrQuery() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\":\"unknown_field:(x OR y)\"}") + 
.post("/search/validate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validatableResponse.assertThat().body("status", equalTo("WARNING")); + validatableResponse.assertThat().body("explanations.error_message[0]", containsString("Query contains unknown field: unknown_field")); + } + + @ContainerMatrixTest + void testRegexWithoutFieldName() { + verifyQueryIsValidatedSuccessfully("/ethernet[0-9]+/"); + } + + @ContainerMatrixTest + void testLowercaseNotOperator() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\":\"not(http_response_code:200)\"}") + .post("/search/validate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + validatableResponse.assertThat().body("status", equalTo("WARNING")); + validatableResponse.assertThat().body("explanations.error_message[0]", containsString("Query contains invalid operator \"not\". All AND / OR / NOT operators have to be written uppercase")); + } + + @ContainerMatrixTest + void testInvalidValueType() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\":\"timestamp:AAA\"}") + .post("/search/validate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + validatableResponse.assertThat().body("status", equalTo("WARNING")); + validatableResponse.assertThat().body("explanations.error_type[0]", equalTo("INVALID_VALUE_TYPE")); + } + + @ContainerMatrixTest + void testSuccessfullyValidatesExistsTerms() { + verifyQueryIsValidatedSuccessfully("_exists_:timestamp"); + verifyQueryIsValidatedSuccessfully("_exists_:level"); + } + + @ContainerMatrixTest + void testQuotedDefaultField() { + // if the validation correctly recognizes the quoted text, it should not warn about lowercase or + verifyQueryIsValidatedSuccessfully("\\\"A or B\\\""); + } + + + @ContainerMatrixTest + void testQueriesFromDocumentationAreValidatedSuccessfully() { + //Uses https://docs.graylog.org/docs/query-language as a source of documented queries (accessed 27.06.2022) + verifyQueryIsValidatedSuccessfully("ssh"); + verifyQueryIsValidatedSuccessfully("ssh login"); + verifyQueryIsValidatedSuccessfully("\\\"ssh login\\\""); + verifyQueryIsValidatedSuccessfully("type:ssh"); + verifyQueryIsValidatedSuccessfully("type:(ssh OR login)"); + verifyQueryIsValidatedSuccessfully("type:\\\"ssh login\\\" "); + verifyQueryIsValidatedSuccessfully("_exists_:type "); + verifyQueryIsValidatedSuccessfully("NOT _exists_:type "); + verifyQueryIsValidatedSuccessfully("/ethernet[0-9]+/"); + verifyQueryIsValidatedSuccessfully("\\\"ssh login\\\" AND source:example.org"); + verifyQueryIsValidatedSuccessfully("(\\\"ssh login\\\" AND (source:example.org OR source:another.example.org)) OR _exists_:always_find_me"); + verifyQueryIsValidatedSuccessfully("\\\"ssh login\\\" AND NOT source:example.org"); + verifyQueryIsValidatedSuccessfully("NOT example.org"); + verifyQueryIsValidatedWithValidationError("source:*.org"); //expected leading wildcard validation error with default settings + verifyQueryIsValidatedSuccessfully("source:exam?le.org"); + verifyQueryIsValidatedSuccessfully("source:exam?le.*"); + verifyQueryIsValidatedSuccessfully("ssh logni~ "); + verifyQueryIsValidatedSuccessfully("source:exmaple.org~"); + verifyQueryIsValidatedSuccessfully("source:exmaple.org~1 "); + verifyQueryIsValidatedSuccessfully("\\\"foo bar\\\"~5 "); + verifyQueryIsValidatedSuccessfully("http_response_code:[500 TO 504]"); + 
verifyQueryIsValidatedSuccessfully("http_response_code:{400 TO 404}"); + verifyQueryIsValidatedSuccessfully("bytes:{0 TO 64]"); + verifyQueryIsValidatedSuccessfully("http_response_code:[0 TO 64}"); + verifyQueryIsValidatedSuccessfully("http_response_code:>400"); + verifyQueryIsValidatedSuccessfully("http_response_code:<400"); + verifyQueryIsValidatedSuccessfully("http_response_code:>=400"); + verifyQueryIsValidatedSuccessfully("http_response_code:<=400"); + verifyQueryIsValidatedSuccessfully("http_response_code:(>=400 AND <500)"); + verifyQueryIsValidatedSuccessfully("timestamp:[\\\"2019-07-23 09:53:08.175\\\" TO \\\"2019-07-23 09:53:08.575\\\"]"); + verifyQueryIsValidatedSuccessfully("otherDate:[\\\"2019-07-23T09:53:08.175\\\" TO \\\"2019-07-23T09:53:08.575\\\"]"); + verifyQueryIsValidatedSuccessfully("otherDate:[\\\"2020-07-29T12:00:00.000-05:00\\\" TO \\\"2020-07-30T15:13:00.000-05:00\\\"]"); + verifyQueryIsValidatedSuccessfully("otherDate:[now-5d TO now-4d]"); + verifyQueryIsValidatedSuccessfully("resource:\\/posts\\/45326"); + } + + private void verifyQueryIsValidatedSuccessfully(final String query) { + given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\": \"" + query + "\"}") + .post("/search/validate") + .then() + .statusCode(200) + .log().ifStatusCodeMatches(not(200)) + .log().ifValidationFails() + .assertThat().body("status", equalTo("OK")); + } + + private void verifyQueryIsValidatedWithValidationError(final String query) { + given() + .spec(api.requestSpecification()) + .when() + .body("{\"query\": \"" + query + "\"}") + .post("/search/validate") + .then() + .statusCode(200) + .log().ifStatusCodeMatches(not(200)) + .log().ifValidationFails() + .assertThat().body("status", equalTo("ERROR")); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/ScriptingApiResourceIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/ScriptingApiResourceIT.java new file mode 100644 index 000000000000..a73367b6a928 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/ScriptingApiResourceIT.java @@ -0,0 +1,957 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views; + +import au.com.bytecode.opencsv.CSVParser; +import io.restassured.http.Header; +import io.restassured.path.json.JsonPath; +import io.restassured.response.ValidatableResponse; +import jakarta.ws.rs.core.MediaType; +import org.graylog.testing.completebackend.apis.GraylogApiResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Sharing; +import org.graylog.testing.completebackend.apis.SharingRequest; +import org.graylog.testing.completebackend.apis.Streams; +import org.graylog.testing.completebackend.apis.Users; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.rest.MoreMediaTypes; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.joda.time.DateTime; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Java6Assertions.within; +import static org.graylog.testing.completebackend.Lifecycle.CLASS; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.hasEntry; + +@ContainerMatrixTestsConfiguration(serverLifecycle = CLASS, + searchVersions = {SearchServer.ES7, SearchServer.OS2}) +public class ScriptingApiResourceIT { + + public static final String DEFAULT_STREAM = "000000000000000000000001"; + private final GraylogApis api; + + private String stream1Id; + private String stream2Id; + + public ScriptingApiResourceIT(GraylogApis apis) { + this.api = apis; + } + + @BeforeAll + public void beforeAll() { + + final String defaultIndexSetId = api.indices().defaultIndexSetId(); + + final JsonPath user = api.users().createUser(Users.JOHN_DOE); + + final String userId = user.getString("id"); + + + this.stream1Id = api.streams().createStream("Stream #1", defaultIndexSetId, Streams.StreamRule.exact("stream1", "target_stream", false)); + this.stream2Id = api.streams().createStream("Stream #2", defaultIndexSetId, Streams.StreamRule.exact("stream2", "target_stream", false)); + + api.sharing().setSharing(new SharingRequest( + new SharingRequest.Entity(Sharing.ENTITY_STREAM, stream2Id), + Map.of( + new SharingRequest.Entity(Sharing.ENTITY_USER, userId), Sharing.PERMISSION_VIEW + ))); + + api.gelf() + .createGelfHttpInput(12201) + .postMessage(""" + {"short_message":"search-sync-test", "host":"example.org", "facility":"test", "_level":1, "_target_stream": "stream1"} + """) + .postMessage(""" + {"short_message":"search-sync-test-2", "host":"example.org", "facility":"another-test", "_level":2, "_target_stream": "stream2"} + """) + .postMessage(""" + {"short_message":"search-sync-test-3", "host":"lorem-ipsum.com", "facility":"another-test", "_level":3, "_http_method":"POST", "_target_stream": "stream2"} + """); + + api.search().waitForMessagesCount(3); + api.fieldTypes().waitForFieldTypeDefinitions("source", "facility", "level"); + } + + @ContainerMatrixTest + void testAggregationByStream() { + final ValidatableResponse 
validatableResponse = + api.post("/search/aggregate", """ + { + "group_by": [ + { + "field": "streams.id" + } + ], + "metrics": [ + { + "function": "count" + } + ] + } + """, 200); + + validatableResponse.log().ifValidationFails() + .assertThat().body("datarows", Matchers.hasSize(3)); + + validateRow(validatableResponse, DEFAULT_STREAM, 3); + validateRow(validatableResponse, stream2Id, 2); + validateRow(validatableResponse, stream1Id, 1); + } + + @ContainerMatrixTest + void testStdDevSorting() { + final GraylogApiResponse responseDesc = + new GraylogApiResponse(api.post("/search/aggregate", """ + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "stddev", + "field": "level", + "sort": "desc" + } + ] + } + """, 200)); + + responseDesc.validatableResponse().log().ifValidationFails() + .assertThat().body("datarows", Matchers.hasSize(2)); + + List stddevDesc = responseDesc.properJSONPath().read("datarows.*[1]"); + org.assertj.core.api.Assertions.assertThat(stddevDesc) + .hasSize(2) + .containsExactly(0.5, 0.0); + + final GraylogApiResponse responseAsc = + new GraylogApiResponse(api.post("/search/aggregate", """ + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "stddev", + "field": "level", + "sort": "asc" + } + ] + } + """, 200)); + + List stddevAsc = responseAsc.properJSONPath().read("datarows.*[1]"); + org.assertj.core.api.Assertions.assertThat(stddevAsc) + .hasSize(2) + .containsExactly(0.0, 0.5); + } + + @ContainerMatrixTest + void testAggregationByStreamTitle() { + final ValidatableResponse validatableResponse = + api.post("/search/aggregate", """ + { + "group_by": [ + { + "field": "streams.title" + } + ], + "metrics": [ + { + "function": "count" + } + ] + } + """, 200); + + validatableResponse.log().ifValidationFails() + .assertThat().body("datarows", Matchers.hasSize(3)); + + validateRow(validatableResponse, "Default Stream", 3); + validateRow(validatableResponse, "Stream #2", 2); + validateRow(validatableResponse, "Stream #1", 1); + } + + @ContainerMatrixTest + void testUserWithLimitedPermissionRequest() { + + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .auth().basic("john.doe", "asdfgh") + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validatableResponse.assertThat().body("datarows", Matchers.hasSize(1)); + validateRow(validatableResponse, "another-test", 2); + } + + @ContainerMatrixTest + void testSchema() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateSchema(validatableResponse, "grouping: facility", "string", "facility"); + validateSchema(validatableResponse, "metric: count(facility)", "numeric", "facility"); + } + + @ContainerMatrixTest + void testMinimalRequest() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """) + 
.post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateRow(validatableResponse, "another-test", 2); + validateRow(validatableResponse, "test", 1); + } + + @ContainerMatrixTest + void testAsciiRender() { + final String response = given() + .spec(api.requestSpecification()) + .header(new Header("Accept", MediaType.TEXT_PLAIN)) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asString().trim(); + + String expected = """ + ┌────────────────────────┬───────────────────────┐ + │grouping: facility │metric: count(facility)│ + ├────────────────────────┼───────────────────────┤ + │another-test │2 │ + │test │1 │ + └────────────────────────┴───────────────────────┘ + """; + + assertThat(response).isEqualTo(expected.trim()); + } + + @ContainerMatrixTest + void testGetRequestAcii() { + final String response = given() + .spec(api.requestSpecification()) + .header(new Header("Accept", MediaType.TEXT_PLAIN)) + .when() + .get("/search/aggregate?groups=facility&metrics=count:facility") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asString().trim(); + + String expected = """ + ┌────────────────────────┬───────────────────────┐ + │grouping: facility │metric: count(facility)│ + ├────────────────────────┼───────────────────────┤ + │another-test │2 │ + │test │1 │ + └────────────────────────┴───────────────────────┘ + """; + assertThat(response).isEqualTo(expected.trim()); + } + + @ContainerMatrixTest + void testCsvRender() throws Exception { + final InputStream response = given() + .spec(api.requestSpecification()) + .header(new Header("Accept", MoreMediaTypes.TEXT_CSV)) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asInputStream(); + + final List lines = parseCsvLines(response); + + // headers + Assertions.assertArrayEquals(lines.get(0), new String[]{"grouping: facility", "metric: count(facility)"}); + + //rows + Assertions.assertArrayEquals(lines.get(1), new String[]{"another-test", "2"}); + Assertions.assertArrayEquals(lines.get(2), new String[]{"test", "1"}); + } + + private List parseCsvLines(InputStream inputStream) throws Exception { + final CSVParser csvParser = new CSVParser(',', '"'); + final List lines = new ArrayList<>(); + + try (final var reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { + while (reader.ready()) { + lines.add(csvParser.parseLine(reader.readLine())); + } + } + + return lines; + } + + @ContainerMatrixTest + void testGetRequestCsv() throws Exception { + + final InputStream response = given() + .spec(api.requestSpecification()) + .header(new Header("Accept", MoreMediaTypes.TEXT_CSV)) + .when() + .get("/search/aggregate?groups=facility&metrics=count:facility") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asInputStream(); + + + final List lines = parseCsvLines(response); + + // headers + Assertions.assertArrayEquals(lines.get(0), new String[]{"grouping: facility", "metric: count(facility)"}); + + //rows + Assertions.assertArrayEquals(lines.get(1), new 
String[]{"another-test", "2"}); + Assertions.assertArrayEquals(lines.get(2), new String[]{"test", "1"}); + } + + @ContainerMatrixTest + void testGetRequestJson() { + final ValidatableResponse response = given() + .spec(api.requestSpecification()) + .header(new Header("Accept", MediaType.APPLICATION_JSON)) + .when() + .get("/search/aggregate?groups=facility&metrics=count:facility") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateRow(response, "another-test", 2); + validateRow(response, "test", 1); + } + + @ContainerMatrixTest + void testTwoAggregations() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + }, + { + "function": "max", + "field": "level" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateRow(validatableResponse, "another-test", 2, 3.0f); + validateRow(validatableResponse, "test", 1, 1.0f); + } + + @ContainerMatrixTest + void testDuplicatedMetrics() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + }, + { + "function": "count", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .statusCode(200); + validateRow(validatableResponse, "another-test", 2, 2); + validateRow(validatableResponse, "test", 1, 1); + } + + @ContainerMatrixTest + void testAggregationWithoutMatchingField() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "http_method" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateRow(validatableResponse, "POST", 1); + validateRow(validatableResponse, "(Empty Value)", 2); + } + + @ContainerMatrixTest + void testMissingDataInRow() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + }, + { + "function": "latest", + "field": "http_method" + }, + { + "function": "max", + "field": "level" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateRow(validatableResponse, "another-test", 2, "POST", 3.0f); + validateRow(validatableResponse, "test", 1, "-", 1.0f); + } + + @ContainerMatrixTest + void testStreamFiltering() { + final String req = """ + { + "streams": ["%s"], + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility" + } + ] + } + """; + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(String.format(Locale.ROOT, req, stream2Id)) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateRow(validatableResponse, "another-test", 2); + validatableResponse.assertThat().body("datarows", Matchers.hasSize(1)); + } + + @ContainerMatrixTest + void testSorting() { + final ValidatableResponse 
validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility", + "sort": "asc" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + final List> rows = validatableResponse.extract().body().jsonPath().getList("datarows"); + Assertions.assertEquals(rows.size(), 2); + Assertions.assertEquals(Arrays.asList("test", (Object) 1), rows.get(0)); + Assertions.assertEquals(Arrays.asList("another-test", (Object) 2), rows.get(1)); + } + + @ContainerMatrixTest + void testMetadata() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "timerange": { + "type": "relative", + "range": 300 + }, + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "count", + "field": "facility", + "sort": "asc" + } + ] + } + """) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validatableResponse.assertThat().body("metadata.effective_timerange.type", Matchers.equalTo("absolute")); + final String from = validatableResponse.extract().body().jsonPath().getString("metadata.effective_timerange.from"); + final String to = validatableResponse.extract().body().jsonPath().getString("metadata.effective_timerange.to"); + final DateTime fromDateTime = DateTime.parse(from); + final DateTime toDateTime = DateTime.parse(to); + final float diff = toDateTime.getMillis() - fromDateTime.getMillis(); + assertThat(diff).isCloseTo(300_000, within(10_000f)); + } + + @ContainerMatrixTest + void testErrorHandling() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "max", + "field": "facility" + } + ] + } + """) + .post("/search/aggregate") + .then() + .statusCode(400) + .assertThat() + .body("type", Matchers.equalTo("ApiError")) + .body("message", Matchers.containsString("Failed to obtain results")); + } + + @ContainerMatrixTest + void testMessages() { + final ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "fields": ["source", "facility", "level"] + } + """) + .post("/search/messages") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + validateSchema(validatableResponse, "field: source", "string", "source"); + validateSchema(validatableResponse, "field: facility", "string", "facility"); + validateSchema(validatableResponse, "field: level", "numeric", "level"); + + validateRow(validatableResponse, "lorem-ipsum.com", "another-test", 3); + validateRow(validatableResponse, "example.org", "another-test", 2); + validateRow(validatableResponse, "example.org", "test", 1); + + } + + @ContainerMatrixTest + void testMessagesWithSorting() { + ValidatableResponse validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "fields": ["source", "facility", "level"], + "sort": "level", + "sort_order" : "Descending" + } + """) + .post("/search/messages") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + List> rows = validatableResponse.extract().body().jsonPath().getList("datarows"); + Assertions.assertEquals(rows.size(), 3); + assertThat(rows.get(0)).contains(3); + assertThat(rows.get(1)).contains(2); + 
assertThat(rows.get(2)).contains(1); + + validatableResponse = given() + .spec(api.requestSpecification()) + .when() + .body(""" + { + "fields": ["source", "facility", "level"], + "sort": "facility", + "sort_order" : "Ascending" + } + """) + .post("/search/messages") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200); + + rows = validatableResponse.extract().body().jsonPath().getList("datarows"); + Assertions.assertEquals(rows.size(), 3); + assertThat(rows.get(0)).contains("another-test"); + assertThat(rows.get(1)).contains("another-test"); + assertThat(rows.get(2)).contains("test"); + + } + + @ContainerMatrixTest + void testMessagesGetRequestAscii() { + final List response = given() + .spec(api.requestSpecification()) + .when() + .header(new Header("Accept", MediaType.TEXT_PLAIN)) + .get("/search/messages?fields=source,facility,level") + .then() + .log().ifStatusCodeMatches(not(200)) + .extract().body().asString().strip().lines().toList(); + + final List expected = """ + ┌────────────────────────┬────────────────────────┬───────────────────────┐ + │field: source │field: facility │field: level │ + ├────────────────────────┼────────────────────────┼───────────────────────┤ + │lorem-ipsum.com │another-test │3 │ + │example.org │another-test │2 │ + │example.org │test │1 │ + └────────────────────────┴────────────────────────┴───────────────────────┘ + """.strip().lines().toList(); + + assertThat(response.size()).isEqualTo(expected.size()); + assertThat(expected.containsAll(response)).isTrue(); + } + + @ContainerMatrixTest + void testPercentageMetric() { + final String req = """ + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "percentage" + } + ] + } + """; + final var response = given() + .spec(api.requestSpecification()) + .when() + .header(new Header("Accept", MediaType.TEXT_PLAIN)) + .body(req) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asString().strip().lines().toList(); + + final List percentageMetricExpectedResult = """ + ┌────────────────────────┬───────────────────────┐ + │grouping: facility │metric: percentage() │ + ├────────────────────────┼───────────────────────┤ + │another-test │0.6666666666666666 │ + │test │0.3333333333333333 │ + └────────────────────────┴───────────────────────┘ + """.strip().lines().toList(); + + assertThat(response.size()).isEqualTo(percentageMetricExpectedResult.size()); + assertThat(percentageMetricExpectedResult.containsAll(response)).isTrue(); + } + + @ContainerMatrixTest + void testPercentageMetricWithFieldName() { + final String req = """ + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "percentage", + "field": "facility" + } + ] + } + """; + final var response = given() + .spec(api.requestSpecification()) + .when() + .header(new Header("Accept", MediaType.TEXT_PLAIN)) + .body(req) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asString().strip().lines().toList(); + + final List percentageMetricExpectedResult = """ + ┌────────────────────────┬───────────────────────┐ + │grouping: facility │metric: │ + │ │percentage(facility) │ + ├────────────────────────┼───────────────────────┤ + │another-test │0.6666666666666666 │ + │test │0.3333333333333333 │ + └────────────────────────┴───────────────────────┘ + """.strip().lines().toList(); + + assertThat(response.size()).isEqualTo(percentageMetricExpectedResult.size()); + 
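+        // equal size plus containsAll means the rendered ASCII table matches the expected output line for line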
assertThat(percentageMetricExpectedResult.containsAll(response)).isTrue(); + } + + @ContainerMatrixTest + void testPercentageMetricWithConfig() { + final String req = """ + { + "group_by": [ + { + "field": "facility" + } + ], + "metrics": [ + { + "function": "percentage", + "field": "facility", + "configuration" : { + "strategy" : "COUNT" + } + } + ] + } + """; + final var response = given() + .spec(api.requestSpecification()) + .when() + .header(new Header("Accept", MediaType.TEXT_PLAIN)) + .body(req) + .post("/search/aggregate") + .then() + .log().ifStatusCodeMatches(not(200)) + .statusCode(200) + .extract().body().asString().strip().lines().toList(); + + final List percentageMetricExpectedResult = """ + ┌────────────────────────┬───────────────────────┐ + │grouping: facility │metric: │ + │ │percentage(facility) │ + ├────────────────────────┼───────────────────────┤ + │another-test │0.6666666666666666 │ + │test │0.3333333333333333 │ + └────────────────────────┴───────────────────────┘ + """.strip().lines().toList(); + + assertThat(response.size()).isEqualTo(percentageMetricExpectedResult.size()); + assertThat(percentageMetricExpectedResult.containsAll(response)).isTrue(); + } + + private void validateSchema(ValidatableResponse response, String name, String type, String field) { + response.assertThat().body("schema", Matchers.hasItem( + Matchers.allOf( + entry("name", name), + entry("type", type), + entry("field", field) + ) + )); + } + + /** + * Each data row consist of an array, containing key as the first item, followed by values for each metric. + */ + private void validateRow(ValidatableResponse response, String key, Object... values) { + final ArrayList expected = new ArrayList<>(); + expected.add(key); + expected.addAll(Arrays.asList(values)); + + response.assertThat().body("datarows", Matchers.hasItem(Matchers.equalTo(expected))); + } + + private Matcher> entry(String key, Object value) { + return hasEntry(key, value); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/SearchMetadataIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/SearchMetadataIT.java new file mode 100644 index 000000000000..0a3344191f7a --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/SearchMetadataIT.java @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.io.InputStream; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration +public class SearchMetadataIT { + private final GraylogApis api; + + public SearchMetadataIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void importMongoFixtures() { + this.api.backend().importMongoDBFixture("mongodb-stored-searches-for-metadata-endpoint.json", SearchMetadataIT.class); + } + + @ContainerMatrixTest + void testEmptyRequest() { + given() + .spec(api.requestSpecification()) + .when() + .post("/views/search/metadata") + .then() + .statusCode(400) + .assertThat().body("message[0]", equalTo("Search body is mandatory")); + } + + @ContainerMatrixTest + void testMinimalRequestWithoutParameter() { + final ValidatableResponse response = given() + .spec(api.requestSpecification()) + .when() + .body(fixture("org/graylog/plugins/views/minimalistic-request.json")) + .post("/views/search/metadata") + .then() + .statusCode(200); + + response.assertThat().body("query_metadata*.value.used_parameters_names[0]", empty()); + response.assertThat().body("declared_parameters", anEmptyMap()); + } + + @ContainerMatrixTest + void testMinimalRequestWithSingleParameter() { + final ValidatableResponse response = given() + .spec(api.requestSpecification()) + .when() + .body(fixture("org/graylog/plugins/views/minimalistic-request-with-undeclared-parameter.json")) + .post("/views/search/metadata") + .then() + .statusCode(200); + + response.assertThat().body("query_metadata.f1446410-a082-4871-b3bf-d69aa42d0c96.used_parameters_names", contains("action")); + response.assertThat().body("declared_parameters", anEmptyMap()); + } + + @ContainerMatrixTest + void testRetrievingMetadataForStoredSearchWithoutParameter() { + final ValidatableResponse response = given() + .spec(api.requestSpecification()) + .when() + .get("/views/search/metadata/61977428c1f17d26b45c8a0b") + .then() + .statusCode(200); + + response.assertThat().body("query_metadata.f1446410-a082-4871-b3bf-d69aa42d0c96.used_parameters_names", empty()); + response.assertThat().body("declared_parameters", anEmptyMap()); + } + + @ContainerMatrixTest + void testRetrievingMetadataForStoredSearchWithParameter() { + final ValidatableResponse response = given() + .spec(api.requestSpecification()) + .when() + .get("/views/search/metadata/61977043c1f17d26b45c8a0a") + .then() + .statusCode(200); + + response.assertThat().body("query_metadata.f1446410-a082-4871-b3bf-d69aa42d0c96.used_parameters_names", contains("action")); + response.assertThat().body("declared_parameters", anEmptyMap()); + } + + private InputStream fixture(String filename) { + return getClass().getClassLoader().getResourceAsStream(filename); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/SearchSyncIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/SearchSyncIT.java new file mode 100644 index 000000000000..4dd0b18b8ddd --- /dev/null +++ 
b/full-backend-tests/src/test/java/org/graylog/plugins/views/SearchSyncIT.java @@ -0,0 +1,233 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views; + +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.Retryer; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.io.InputStream; +import java.util.Collections; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.CoreMatchers.hasItem; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration +public class SearchSyncIT { + static final int GELF_HTTP_PORT = 12201; + + private final GraylogApis api; + + public SearchSyncIT(final GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void importMongoFixtures() { + this.api.backend().importMongoDBFixture("mongodb-stored-searches-for-execution-endpoint.json", SearchSyncIT.class); + + api.gelf().createGelfHttpInput(GELF_HTTP_PORT) + .postMessage("{\"short_message\":\"search-sync-test\", \"host\":\"example.org\", \"facility\":\"test\"}"); + api.search().waitForMessage("search-sync-test"); + } + + @ContainerMatrixTest + void testEmptyBody() { + given() + .spec(api.requestSpecification()) + .when() + .post("/views/search/sync") + .then() + .statusCode(400) + .assertThat().body("message[0]", equalTo("Search body is mandatory")); + } + + @ContainerMatrixTest + void testMinimalisticRequest() { + given() + .spec(api.requestSpecification()) + .when() + .body(fixture("org/graylog/plugins/views/minimalistic-request.json")) + .post("/views/search/sync") + .then() + .statusCode(200) + .assertThat() + .body("execution.completed_exceptionally", equalTo(false)) + .body("results*.value.search_types[0]*.value.messages.message.message[0]", hasItem("search-sync-test")); + } + + @ContainerMatrixTest + void testMinimalisticRequestv2() { + given() + .spec(api.requestSpecification()) + .accept("application/vnd.graylog.search.v2+json") + .contentType("application/vnd.graylog.search.v2+json") + .when() + .body(fixture("org/graylog/plugins/views/minimalistic-request.json")) + .post("/views/search/sync") + .then() + .statusCode(200) + .assertThat() + .body("execution.completed_exceptionally", equalTo(false)) + 
.body("results*.value.search_types[0]*.value.messages.message.message[0]", hasItem("search-sync-test")); + } + + @ContainerMatrixTest + void testRequestWithStreamsv2() { + given() + .spec(api.requestSpecification()) + .accept("application/vnd.graylog.search.v2+json") + .contentType("application/vnd.graylog.search.v2+json") + .when() + .body(fixture("org/graylog/plugins/views/minimalistic-request-with-streams.json")) + .post("/views/search/sync") + .then() + .statusCode(200) + .assertThat() + .body("execution.completed_exceptionally", equalTo(false)) + .body("results*.value.search_types[0]*.value.messages.message.message[0]", hasItem("search-sync-test")); + } + + @ContainerMatrixTest + void testRequestStoredSearch() throws ExecutionException, RetryException { + final String jobId = executeStoredSearch("61977043c1f17d26b45c8a0b"); + + retrieveSearchResults(jobId) + .body("execution.completed_exceptionally", equalTo(false)) + .body("results.f1446410-a082-4871-b3bf-d69aa42d0c96.search_types.8306779b-933f-473f-837d-b7a7d83a9a40.name", equalTo("chart")); + } + + @ContainerMatrixTest + void testRequestStoredSearchWithGlobalOverrideKeepingOnlySingleSearchType() throws ExecutionException, RetryException { + final String jobId = executeStoredSearch("61977043c1f17d26b45c8a0b", Collections.singletonMap( + "global_override", Collections.singletonMap( + "keep_search_types", Collections.singleton("01c76680-377b-4930-86e2-a55fdb867b58") + ) + )); + + retrieveSearchResults(jobId) + .body("execution.completed_exceptionally", equalTo(false)) + .body("results.f1446410-a082-4871-b3bf-d69aa42d0c96.search_types", not(hasKey("f1446410-a082-4871-b3bf-d69aa42d0c97"))) + .body("results.f1446410-a082-4871-b3bf-d69aa42d0c97.search_types", hasKey("01c76680-377b-4930-86e2-a55fdb867b58")); + } + + @ContainerMatrixTest + void testRequestStoredSearchWithGlobalOverrideKeepingOnlySingleQuery() throws ExecutionException, RetryException { + final String jobId = executeStoredSearch("61977043c1f17d26b45c8a0b", Collections.singletonMap( + "global_override", Collections.singletonMap( + "keep_queries", Collections.singleton("f1446410-a082-4871-b3bf-d69aa42d0c97") + ) + )); + + retrieveSearchResults(jobId) + .body("execution.completed_exceptionally", equalTo(false)) + .body("results", not(hasKey("f1446410-a082-4871-b3bf-d69aa42d0c96"))) + .body("results.f1446410-a082-4871-b3bf-d69aa42d0c97.search_types", hasKey("01c76680-377b-4930-86e2-a55fdb867b58")); + } + + @ContainerMatrixTest + void testThatQueryOrderStaysConsistentInV1() { + given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .accept("application/json") + .contentType("application/json") + .when() + .body(fixture("org/graylog/plugins/views/search-with-three-empty-queries.json")) + .post("/views/search") + .then() + .log().ifStatusCodeMatches(not(201)) + .statusCode(201) + .assertThat() + .body("queries*.id", contains("4966dd79-2c7d-4ba9-8f90-c84aea7b5c49", + "0d5b45b8-1f55-4b60-ad34-d086ddd5d8fa", + "3eec6f5c-0f1b-41dc-bb95-3ebc6bb905f3")); + } + + @ContainerMatrixTest + void testThatQueryOrderStaysConsistentInV2() { + given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .accept("application/vnd.graylog.search.v2+json") + .contentType("application/vnd.graylog.search.v2+json") + .when() + .body(fixture("org/graylog/plugins/views/search-with-three-empty-queries-v2.json")) + .post("/views/search") + .then() + .log().ifStatusCodeMatches(not(201)) + .statusCode(201) + .assertThat() + 
.body("queries*.id", contains("4966dd79-2c7d-4ba9-8f90-c84aea7b5c49", + "0d5b45b8-1f55-4b60-ad34-d086ddd5d8fa", + "3eec6f5c-0f1b-41dc-bb95-3ebc6bb905f3")); + } + + private String executeStoredSearch(String searchId) { + return executeStoredSearch(searchId, Collections.emptyMap()); + } + + private String executeStoredSearch(String searchId, Object body) { + final ValidatableResponse result = given() + .spec(api.requestSpecification()) + .when() + .body(body) + .post("/views/search/{searchId}/execute", searchId) + .then() + .statusCode(201); + + final String jobId = result.extract().path("id"); + + assertThat(jobId).isNotBlank(); + + return jobId; + } + + private ValidatableResponse retrieveSearchResults(String jobId) throws ExecutionException, RetryException { + final Retryer retryer = RetryerBuilder.newBuilder() + .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS)) + .withStopStrategy(StopStrategies.stopAfterAttempt(5)) + .retryIfExceptionOfType(AssertionError.class) + .build(); + + return retryer.call(() -> given() + .spec(api.requestSpecification()) + .when() + .get("/views/search/status/{jobId}", jobId) + .then() + .statusCode(200) + .body("execution.done", equalTo(true))); + } + + private InputStream fixture(String filename) { + return getClass().getClassLoader().getResourceAsStream(filename); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/SessionsResourceIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/SessionsResourceIT.java new file mode 100644 index 000000000000..9e482bef13e0 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/SessionsResourceIT.java @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views; + +import com.google.common.collect.ImmutableMap; +import io.restassured.builder.RequestSpecBuilder; +import io.restassured.http.Cookie; +import io.restassured.response.Response; +import io.restassured.specification.RequestSpecification; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import java.util.Collections; +import java.util.Map; + +import static io.restassured.RestAssured.given; +import static io.restassured.http.ContentType.JSON; +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.Matchers.emptyOrNullString; +import static org.hamcrest.Matchers.not; + +@ContainerMatrixTestsConfiguration(searchVersions = {SearchServer.OS1}) +public class SessionsResourceIT { + private static final String SESSIONS_RESOURCE = "/system/sessions"; + private static final String AUTHENTICATION_COOKIE = "authentication"; + private static final Map<String, String> VALID_CREDENTIALS = ImmutableMap.of( + "username", "admin", + "password", "admin" + ); + private static final Map<String, String> INVALID_CREDENTIALS = ImmutableMap.of( + "username", "admin", + "password", "wrongpassword" + ); + + private final GraylogApis api; + + private static RequestSpecification makeRequestSpec(GraylogApis api) { + return new RequestSpecBuilder().build() + .baseUri(api.backend().uri()) + .port(api.backend().apiPort()) + .basePath("/api") + .accept(JSON) + .contentType(JSON) + .header("X-Requested-By", "peterchen"); + } + + public SessionsResourceIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void failingLoginShouldNotReturnCookieOrToken() { + given() + .spec(makeRequestSpec(api)) + .post(SESSIONS_RESOURCE) + .then() + .assertThat() + .statusCode(400) + .cookies(Collections.emptyMap()); + + given() + .spec(makeRequestSpec(api)) + .body(INVALID_CREDENTIALS) + .post(SESSIONS_RESOURCE) + .then() + .assertThat() + .statusCode(401) + .cookies(Collections.emptyMap()); + } + + @ContainerMatrixTest + void successfulLoginShouldReturnCookieAndToken() { + final Response response = given() + .spec(makeRequestSpec(api)) + .body(VALID_CREDENTIALS) + .post(SESSIONS_RESOURCE); + + response.then() + .assertThat() + .statusCode(200) + .body("session_id", not(emptyOrNullString())) + .cookie(AUTHENTICATION_COOKIE, not(emptyOrNullString())); + + assertThat(response.jsonPath().getString("session_id")) + .isEqualTo(response.cookie(AUTHENTICATION_COOKIE)); + + final Cookie authenticationCookie = response.getDetailedCookie(AUTHENTICATION_COOKIE); + final RequestSpecification authenticatedRequest = makeRequestSpec(api).cookie(authenticationCookie); + + given() + .spec(authenticatedRequest) + .get("/system") + .then() + .assertThat() + .statusCode(200); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/StartPageLastOpenedIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/StartPageLastOpenedIT.java new file mode 100644 index 000000000000..5e26ced371df --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/StartPageLastOpenedIT.java @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2020 Graylog, Inc.
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Users; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.shared.security.RestPermissions; +import org.junit.jupiter.api.BeforeAll; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.core.StringContains.containsString; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS) +public class StartPageLastOpenedIT { + private final GraylogApis api; + + private static final Users.User user = new Users.User( + "john.doe2", + "asdfgh", + "John", + "Doe", + "john.doe2@example.com", + false, + 30_000, + "Europe/Vienna", + Collections.emptyList(), + List.of(RestPermissions.DASHBOARDS_CREATE) + ); + + public StartPageLastOpenedIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void init() { + api.users().createUser(user); + } + + //This test has to be the only test in this class. + //Because of the memoization in Catalog, we cannot guarantee that after search/dashboard creation it will be immediately visible in the catalog. + //Keeping this test independent prevents us from using unnecessary sleep of a few seconds. + @ContainerMatrixTest + void testCreateLastOpenedItem() { + api.postWithResource("/views/search", user, "org/graylog/plugins/views/startpage-save-search-request.json", 201); + api.postWithResource("/views", user, "org/graylog/plugins/views/startpage-views-request.json", 200); + + var validatableResponse = api.get("/views", user, Map.of(), 200); + var id = validatableResponse.extract().jsonPath().get("views[0].id").toString(); + + api.get("/views/" + id, user, Map.of(), 200); + validatableResponse = api.get("/startpage/lastOpened", user, Map.of(), 200); + validatableResponse.assertThat().body("lastOpened[0].grn", containsString(id)); + } + +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/StartPageRecentActivityIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/StartPageRecentActivityIT.java new file mode 100644 index 000000000000..9a7b03bba662 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/StartPageRecentActivityIT.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Streams; +import org.graylog.testing.completebackend.apis.Users; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import java.util.Map; + +import static org.hamcrest.core.StringContains.containsString; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS) +public class StartPageRecentActivityIT { + + private final GraylogApis api; + + public StartPageRecentActivityIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void testCreateRecentActivity() { + final String defaultIndexSetId = api.indices().defaultIndexSetId(); + var stream1Id = api.streams().createStream("Stream #1", defaultIndexSetId, Streams.StreamRule.exact("stream1", "target_stream", false)); + + var validatableResponse = api.get("/startpage/recentActivity", Users.LOCAL_ADMIN, Map.of(), 200); + validatableResponse.assertThat().body("recentActivity[0].item_grn", containsString(stream1Id)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/SuggestionResourceIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/SuggestionResourceIT.java new file mode 100644 index 000000000000..b9e7eab02d33 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/SuggestionResourceIT.java @@ -0,0 +1,259 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views; + +import com.fasterxml.jackson.annotation.JsonInclude; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Streams; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import javax.annotation.Nullable; +import java.util.Set; + +import static io.restassured.RestAssured.given; +import static org.graylog.testing.completebackend.Lifecycle.CLASS; +import static org.graylog.testing.completebackend.Lifecycle.VM; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration(serverLifecycle = CLASS, searchVersions = {SearchServer.ES7, SearchServer.OS1, SearchServer.OS2, SearchServer.OS2_LATEST, SearchServer.DATANODE_DEV}) +public class SuggestionResourceIT { + private final GraylogApis api; + + private String stream1Id; + private String stream2Id; + + public SuggestionResourceIT(GraylogApis api) { + this.api = api; + } + + record SuggestionsRequest(String field, String input, + @JsonInclude(JsonInclude.Include.NON_NULL) @Nullable Set streams, + @JsonInclude(JsonInclude.Include.NON_NULL) @Nullable Integer size) { + static SuggestionsRequest create(String field, String input) { + return new SuggestionsRequest(field, input, null, null); + } + + static SuggestionsRequest create(String field, String input, Set streams) { + return new SuggestionsRequest(field, input, streams, null); + } + + static SuggestionsRequest create(String field, String input, int size) { + return new SuggestionsRequest(field, input, null, size); + } + } + + @BeforeAll + public void init() { + final String defaultIndexSetId = api.indices().defaultIndexSetId(); + this.stream1Id = api.streams().createStream("Stream #1", defaultIndexSetId, Streams.StreamRule.exact("stream1", "target_stream", false)); + this.stream2Id = api.streams().createStream("Stream #2", defaultIndexSetId, Streams.StreamRule.exact("stream2", "target_stream", false)); + + api.gelf().createGelfHttpInput() + .postMessage( + """ + {"short_message":"SuggestionResourceIT#1", + "host":"example.org", + "facility":"junit", + "_target_stream": "stream1", + "http_response_code": 200 + }""") + .postMessage( + """ + {"short_message":"SuggestionResourceIT#2", + "host":"example.org", + "facility":"test", + "_target_stream": "stream1", + "http_response_code": 200 + }""") + .postMessage( + """ + {"short_message":"SuggestionResourceIT#3", + "host":"example.org", + "facility":"test", + "_target_stream": "stream1", + "http_response_code": 201 + }""") + .postMessage( + """ + {"short_message":"SuggestionResourceIT#4", + "host":"foreign.org", + "facility":"test", + "_target_stream": "stream2", + "http_response_code": 404 + }""") + .postMessage( + """ + {"short_message":"SuggestionResourceIT#5", + "host":"something-else.org", + "foo":"bar", + }"""); + + api.search().waitForMessages( + "SuggestionResourceIT#1", + "SuggestionResourceIT#2", + "SuggestionResourceIT#3", + "SuggestionResourceIT#4", + "SuggestionResourceIT#5" + ); + + api.fieldTypes().waitForFieldTypeDefinitions("gl2_source_node", 
"gl2_source_input", "streams"); + } + + @ContainerMatrixTest + void testMinimalRequest() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("facility", "")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.value[0]", equalTo("test")) + .body("suggestions.occurrence[0]", greaterThanOrEqualTo(3)); + } + + @ContainerMatrixTest + void testNumericalValueSuggestion() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("http_response_code", "20")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.value[0]", equalTo("200")) + .body("suggestions.occurrence[0]", greaterThanOrEqualTo(2)); + } + + @ContainerMatrixTest + void testAugmentedSuggestionTitlesForStreams() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("streams", "")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.title", hasItems("Default Stream", "Stream #1", "Stream #2")); + } + + @ContainerMatrixTest + void testAugmentedSuggestionTitlesForNodes() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("gl2_source_node", "")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.title", not(empty())); + } + + @ContainerMatrixTest + void testAugmentedSuggestionTitlesForInputs() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("gl2_source_input", "")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.title", hasItems("Integration test GELF input")); + } + + @ContainerMatrixTest + void testSuggestionsAreLimitedToStream() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("source", "", Set.of(stream1Id))) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.value[0]", equalTo("example.org")) + .body("suggestions.occurrence[0]", equalTo(3)); + + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("source", "", Set.of(stream2Id))) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.value[0]", equalTo("foreign.org")) + .body("suggestions.occurrence[0]", equalTo(1)); + } + + @ContainerMatrixTest + void testInvalidField() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("message", "foo")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + // error types and messages are different for each ES version, so let's just check that there is an error in the response + .body("error", notNullValue()); + } + + @ContainerMatrixTest + void testSizeOtherDocsCount() { + given() + .spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("facility", "", 1)) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.value[0]", equalTo("test")) + .body("suggestions.occurrence[0]", greaterThanOrEqualTo(2)) + .body("sum_other_docs_count", greaterThanOrEqualTo(1)); + } + + @ContainerMatrixTest + void testTypoCorrection() { + given() + 
.spec(api.requestSpecification()) + .when() + .body(SuggestionsRequest.create("facility", "tets")) + .post("/search/suggest") + .then() + .statusCode(200) + .assertThat().log().ifValidationFails() + .body("suggestions.value[0]", equalTo("test")) + .body("suggestions.occurrence[0]", greaterThanOrEqualTo(1)); + } + +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/TimeLimitIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/TimeLimitIT.java new file mode 100644 index 000000000000..0670e6ab53a4 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/TimeLimitIT.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.AfterEach; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration +public class TimeLimitIT { + private final GraylogApis api; + + public TimeLimitIT(GraylogApis api) { + this.api = api; + } + + @AfterEach + public void resetConfig() { + final ValidatableResponse response = given() + .spec(api.requestSpecification()) + .when() + .body(getClass().getClassLoader().getResourceAsStream("org/graylog/plugins/views/cluster-search-config-reset.json")) + .put("/system/cluster_config/org.graylog2.indexer.searches.SearchesClusterConfig") + .then() + .statusCode(202); + } + + @ContainerMatrixTest + void testQueryTimeRangeLimit() { + given() + .spec(api.requestSpecification()) + .when() + .body(getClass().getClassLoader().getResourceAsStream("org/graylog/plugins/views/cluster-search-config.json")) + .put("/system/cluster_config/org.graylog2.indexer.searches.SearchesClusterConfig") + .then() + .statusCode(202) + .body("query_time_range_limit", equalTo("PT2M")); + + final String body = given() + .spec(api.requestSpecification()) + .when() + .body(getClass().getClassLoader().getResourceAsStream("org/graylog/plugins/views/minimalistic-request.json")) + .post("/views/search/sync") + .then() + .statusCode(200) + .assertThat().body("execution.completed_exceptionally", equalTo(true)) + .extract() + .body().asString(); + assertThat(body).contains("Search out of allowed time range limit"); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/ViewsResourceIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/ViewsResourceIT.java new file mode 100644 index 000000000000..14e1a69da5bf --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/ViewsResourceIT.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views; + +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import static org.hamcrest.core.IsEqual.equalTo; + +@ContainerMatrixTestsConfiguration +public class ViewsResourceIT { + private final GraylogApis api; + + public ViewsResourceIT(GraylogApis apis) { + this.api = apis; + } + + @ContainerMatrixTest + void testEmptyBody() { + api.post("/views", 400) + .assertThat().body("message[0]", equalTo("View is mandatory")); + } + + @ContainerMatrixTest + void testCreateViewRequestWithoutPersistedSearch() { + api.postWithResource("/views", "org/graylog/plugins/views/views-request.json", 400); + } + + @ContainerMatrixTest + void testCreateSearchPersistView() { + api.postWithResource("/views/search", "org/graylog/plugins/views/save-search-request.json", 201); + api.postWithResource("/views", "org/graylog/plugins/views/views-request.json", 200); + } + + @ContainerMatrixTest + void testInvalidSearchType() { + api.postWithResource("/views/search", "org/graylog/plugins/views/save-search-request-invalid.json", 201); + api.postWithResource("/views", "org/graylog/plugins/views/views-request-invalid-search-type.json", 400) + .assertThat() + .body("message", equalTo("Search types do not correspond to view/search types, missing searches [967d2217-fd99-48a6-b829-5acdab906808]; search types: [967d2217-fd99-48a6-b829-5acdab906807]; state types: [967d2217-fd99-48a6-b829-5acdab906808]")); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/AggregationSortingIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/AggregationSortingIT.java new file mode 100644 index 000000000000..db8fabedafb7 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/AggregationSortingIT.java @@ -0,0 +1,206 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.plugins.views.aggregations; + +import com.github.rholder.retry.RetryException; +import io.restassured.response.ValidatableResponse; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.PivotSort; +import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSort; +import org.graylog.plugins.views.search.searchtypes.pivot.SortSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Values; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Count; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.inputs.PortBoundGelfInputApi; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.stream.IntStream; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog.testing.containermatrix.SearchServer.ES7; +import static org.graylog.testing.containermatrix.SearchServer.OS1; +import static org.graylog.testing.containermatrix.SearchServer.OS2_LATEST; +import static org.hamcrest.Matchers.is; + +@ContainerMatrixTestsConfiguration(searchVersions = {ES7, OS1, OS2_LATEST}) +public class AggregationSortingIT { + private static final String numericField = "numeric_field"; + private static final String nonNumericField = "non_numeric_field"; + + private final GraylogApis api; + private PortBoundGelfInputApi gelfInput; + + public AggregationSortingIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + void setUp() { + this.gelfInput = api.gelf().createGelfHttpInput(); + } + + @ContainerMatrixTest + void sortingOnNumericPivotFieldSortsNumerically() throws ExecutionException, RetryException { + final var values = Set.of(9, 8, 4, 25, 2, 15, 1); + final var messagePrefix = "sorting on numeric pivot test "; + try (final var env = createEnvironment()) { + IntStream.range(0, 3).forEach((i) -> { + for (final var value : values) { + env.ingestMessage(Map.of( + nonNumericField, "foo", + numericField, value, + "short_message", messagePrefix + value + )); + } + }); + + final var pivotBuilder = Pivot.builder() + .rowGroups(Values.builder() + .fields(List.of(nonNumericField, numericField)).limit(10).build()) + .series(List.of()) + .rollup(false); + + env.waitForMessages(values.stream().map(value -> messagePrefix + value).toList()); + + env.waitForFieldTypes(numericField); + + final var resultDesc = env.executePivot( + pivotBuilder + .sort(PivotSort.create(numericField, SortSpec.Direction.Descending)) + .build() + ); + assertThat(resultDesc).isNotNull(); + + expectKeys(resultDesc, "25", "15", "9", "8", "4", "2", "1"); + + final var resultAsc = env.executePivot( + pivotBuilder + .sort(PivotSort.create(numericField, SortSpec.Direction.Ascending)) + .build()); + + expectKeys(resultAsc, "1", "2", "4", "8", "9", "15", "25"); + } + } + + @ContainerMatrixTest + void sortingOnNonNumericPivotFieldSortsLexicographically() throws ExecutionException, RetryException { + final var values = Set.of("B", "C", "D", "A", "E"); + final var messagePrefix = "sorting on non-numeric pivot test "; + try (final var env = createEnvironment()) { + IntStream.range(0, 3).forEach((i) -> { + for (final var value : values) { + 
env.ingestMessage(Map.of( + nonNumericField, value, + numericField, 42, + "short_message", messagePrefix + value + )); + } + }); + + final var pivotBuilder = Pivot.builder() + .rowGroups(Values.builder() + .fields(List.of(numericField, nonNumericField)).limit(10).build()) + .series(List.of()) + .rollup(false); + + env.waitForMessages(values.stream().map(value -> messagePrefix + value).toList()); + + env.waitForFieldTypes(numericField); + + final var resultDesc = env.executePivot( + pivotBuilder + .sort(PivotSort.create(nonNumericField, SortSpec.Direction.Ascending)) + .build() + ); + assertThat(resultDesc).isNotNull(); + + expectKeys(resultDesc, "A", "B", "C", "D", "E"); + + final var resultAsc = env.executePivot( + pivotBuilder + .sort(PivotSort.create(nonNumericField, SortSpec.Direction.Descending)) + .build()); + + expectKeys(resultAsc, "E", "D", "C", "B", "A"); + } + } + + @ContainerMatrixTest + void sortingOnBothNumericFieldAndMetric() throws ExecutionException, RetryException { + final var values = List.of(2, 4, 9, 1, 25, 2, 9, 4, 15); + final var messagePrefix = "Ingesting value "; + try (final var env = createEnvironment()) { + IntStream.range(0, 3).forEach((i) -> { + for (final var value : values) { + env.ingestMessage(Map.of( + nonNumericField, "Test", + numericField, value, + "short_message", messagePrefix + value + )); + } + }); + + env.waitForMessages(values.stream().distinct().map(value -> messagePrefix + value).toList()); + + env.waitForFieldTypes(numericField); + + final var pivotBuilder = Pivot.builder() + .rowGroups(Values.builder() + .fields(List.of(nonNumericField, numericField)).limit(10).build()) + .series(List.of(Count.builder().build())) + .rollup(false); + + final var resultAsc = env.executePivot( + pivotBuilder + .sort( + PivotSort.create(numericField, SortSpec.Direction.Ascending), + SeriesSort.create("count()", SortSpec.Direction.Descending) + ) + .build() + ); + + expectKeys(resultAsc, "1", "2", "4", "9", "15", "25"); + + final var resultDesc = env.executePivot( + pivotBuilder + .sort( + PivotSort.create(numericField, SortSpec.Direction.Descending), + SeriesSort.create("count()", SortSpec.Direction.Descending) + ) + .build() + ); + + expectKeys(resultDesc, "25", "15", "9", "4", "2", "1"); + } + } + + private void expectKeys(ValidatableResponse response, String... values) { + for (int i = 0; i < values.length; i++) { + response.body(".rows[" + i + "].key[1]", is(values[i])); + } + } + + private GraylogApis.SearchEnvironment createEnvironment() throws ExecutionException, RetryException { + return api.createEnvironment(gelfInput); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/CompoundFieldsAggregationIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/CompoundFieldsAggregationIT.java new file mode 100644 index 000000000000..72402c4ab9d2 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/CompoundFieldsAggregationIT.java @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views.aggregations; + +import com.github.rholder.retry.RetryException; +import org.graylog.testing.completebackend.apis.DefaultStreamMatches; +import org.graylog.testing.completebackend.apis.GraylogApiResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Streams; +import org.graylog.testing.completebackend.apis.inputs.PortBoundGelfInputApi; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.plugin.streams.StreamRuleType; +import org.junit.jupiter.api.BeforeEach; + +import java.util.List; +import java.util.concurrent.ExecutionException; + +import static org.graylog.testing.containermatrix.SearchServer.ES7; +import static org.graylog.testing.containermatrix.SearchServer.OS1; +import static org.graylog.testing.containermatrix.SearchServer.OS2; +import static org.graylog.testing.containermatrix.SearchServer.OS2_4; +import static org.graylog.testing.containermatrix.SearchServer.OS2_LATEST; + +@ContainerMatrixTestsConfiguration(searchVersions = {ES7, OS1, OS2, OS2_4, OS2_LATEST}) +public class CompoundFieldsAggregationIT { + + private final GraylogApis api; + + public CompoundFieldsAggregationIT(GraylogApis api) { + this.api = api; + } + + @BeforeEach + void setUp() throws ExecutionException, RetryException { + final String indexSetA = api.indices().createIndexSet("Compound field index A", "Compound field index A", "compound_a"); + final String indexSetB = api.indices().createIndexSet("Compound field index B", "Compound field index B", "compound_b"); + + final String streamA = api.streams().createStream("Stream A", indexSetA, DefaultStreamMatches.REMOVE, new Streams.StreamRule(StreamRuleType.EXACT.toInteger(), "streamA", "target_stream", false)); + final String streamB = api.streams().createStream("Stream B", indexSetB, DefaultStreamMatches.REMOVE, new Streams.StreamRule(StreamRuleType.EXACT.toInteger(), "streamB", "target_stream", false)); + + final List indexNamesA = api.indices().waitForIndexNames(indexSetA); + final List indexNamesB = api.indices().waitForIndexNames(indexSetB); + + final String indexA = indexNamesA.iterator().next(); + final String indexB = indexNamesB.iterator().next(); + + api.backend().searchServerInstance().client().putFieldMapping(indexA, "my_ip", "ip"); + api.backend().searchServerInstance().client().putFieldMapping(indexB, "my_ip", "keyword"); + + final PortBoundGelfInputApi gelf = api.gelf().createGelfHttpInput(); + gelf.postMessage(""" + {"short_message":"compound-field-test-a", "host":"example.org", "_my_ip":"192.168.1.1", "_target_stream": "streamA"} + """); + gelf.postMessage(""" + {"short_message":"compound-field-test-b", "host":"example.org", "_my_ip":"8.8.8.8", "_target_stream": "streamB"} + """); + + api.search().waitForMessages("compound-field-test-a", "compound-field-test-b"); + } + + @ContainerMatrixTest + void aggregate() { + final GraylogApiResponse responseAsc = + new GraylogApiResponse(api.post("/search/aggregate",""" + { + "group_by": [ + { + "field": "my_ip" + } + ], + "metrics": [ + { + "function": "count", + "field": "my_ip", + "sort": "desc" + } + ] + } + """, 200)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/SearchAggregationsIT.java 
b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/SearchAggregationsIT.java new file mode 100644 index 000000000000..4827c2f8ca48 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/SearchAggregationsIT.java @@ -0,0 +1,917 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.plugins.views.aggregations; + +import com.google.common.base.Joiner; +import io.restassured.response.ValidatableResponse; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.PivotSort; +import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSort; +import org.graylog.plugins.views.search.searchtypes.pivot.SortSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Time; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.TimeUnitInterval; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Values; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Average; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Count; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Latest; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Max; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Min; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Percentage; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog.testing.containermatrix.SearchServer.ES7; +import static org.graylog.testing.containermatrix.SearchServer.OS1; +import static org.graylog.testing.containermatrix.SearchServer.OS2_LATEST; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +@ContainerMatrixTestsConfiguration(searchVersions = {ES7, OS1, OS2_LATEST}) +public class SearchAggregationsIT { + private static final String PIVOT_NAME = "pivotaggregation"; + private static final String PIVOT_PATH = "results.query1.search_types." 
+ PIVOT_NAME; + + private final GraylogApis api; + + public SearchAggregationsIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void setUp() { + this.api.backend().importElasticsearchFixture("random-http-logs.json", SearchAggregationsIT.class); + } + + @ContainerMatrixTest + void testZeroPivots() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testZeroPivotsWithLatestMetric() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Latest.builder().field("http_method").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(Collections.emptyList(), List.of("latest(http_method)")), equalTo("GET")); + } + + @ContainerMatrixTest + void testSingleRowPivot() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("http_method").build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(5)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("GET", "count()"), equalTo(860)) + .body(pathToMetricResult("DELETE", "count()"), equalTo(52)) + .body(pathToMetricResult("POST", "count()"), equalTo(45)) + .body(pathToMetricResult("PUT", "count()"), equalTo(43)) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testUnknownFieldsPivot() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().fields(List.of("http_method", "unknown_field_1", "unknown_field_2")).build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(4)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of("GET", "(Empty Value)", "(Empty Value)"), List.of("count()")), equalTo(860)) + .body(pathToMetricResult(List.of("DELETE", "(Empty Value)", "(Empty Value)"), List.of("count()")), equalTo(52)) + .body(pathToMetricResult(List.of("POST", "(Empty Value)", "(Empty Value)"), List.of("count()")), equalTo(45)) + .body(pathToMetricResult(List.of("PUT", "(Empty Value)", "(Empty Value)"), List.of("count()")), equalTo(43)); + } + + @ContainerMatrixTest + void testUnknownFieldsAroundUnknownPivot() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().fields(List.of("unknown_field_1", "http_method", "unknown_field_2")).build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(4)); + + final String 
searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of("(Empty Value)", "GET", "(Empty Value)"), List.of("count()")), equalTo(860)) + .body(pathToMetricResult(List.of("(Empty Value)", "DELETE", "(Empty Value)"), List.of("count()")), equalTo(52)) + .body(pathToMetricResult(List.of("(Empty Value)", "POST", "(Empty Value)"), List.of("count()")), equalTo(45)) + .body(pathToMetricResult(List.of("(Empty Value)", "PUT", "(Empty Value)"), List.of("count()")), equalTo(43)); + } + + @ContainerMatrixTest + void testUnknownFieldFirstPivot() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().fields(List.of("unknown_field_1", "http_method")).build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(4)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of("(Empty Value)", "GET"), List.of("count()")), equalTo(860)) + .body(pathToMetricResult(List.of("(Empty Value)", "DELETE"), List.of("count()")), equalTo(52)) + .body(pathToMetricResult(List.of("(Empty Value)", "POST"), List.of("count()")), equalTo(45)) + .body(pathToMetricResult(List.of("(Empty Value)", "PUT"), List.of("count()")), equalTo(43)); + } + + + @ContainerMatrixTest + void testAllUnknownFieldsPivot() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().fields(List.of("unknown_field_1", "unknown_field_2", "unknown_field_3")).build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of("(Empty Value)", "(Empty Value)", "(Empty Value)"), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testFindTopPivot() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().field("http_method").limit(1).build()) + .sort(SeriesSort.create(SeriesSort.Type, "max(took_ms)", SortSpec.Direction.Descending)) + .series(Max.builder().field("took_ms").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("GET", "max(took_ms)"), equalTo(5300.0f)); + } + + @ContainerMatrixTest + void testFindBottomPivot() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().field("http_method").limit(1).build()) + .sort(SeriesSort.create(SeriesSort.Type, "max(took_ms)", SortSpec.Direction.Ascending)) + .series(Max.builder().field("took_ms").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("DELETE", "max(took_ms)"), equalTo(104.0f)); + } + + @ContainerMatrixTest + void testSingleRowPivotWithDateField() { + final Pivot pivot = Pivot.builder() + .rollup(true) 
+ .rowGroups( + Time.builder() + .field("timestamp") + .interval(TimeUnitInterval.Builder.builder().timeunit("10s").build()) + .build() + ) + .series( + Count.builder().build(), + Average.builder().field("took_ms").build() + ) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(5)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("2022-09-26T14:12:10.000Z", "count()"), equalTo(1)) + .body(pathToMetricResult("2022-09-26T14:12:10.000Z", "avg(took_ms)"), equalTo(51.0f)) + .body(pathToMetricResult("2022-09-26T14:12:20.000Z", "count()"), equalTo(395)) + .body(pathToMetricResult("2022-09-26T14:12:20.000Z", "avg(took_ms)"), equalTo(59.35443037974684f)) + .body(pathToMetricResult("2022-09-26T14:12:30.000Z", "count()"), equalTo(394)) + .body(pathToMetricResult("2022-09-26T14:12:30.000Z", "avg(took_ms)"), equalTo(70.2741116751269f)) + .body(pathToMetricResult("2022-09-26T14:12:40.000Z", "count()"), equalTo(210)) + .body(pathToMetricResult("2022-09-26T14:12:40.000Z", "avg(took_ms)"), equalTo(131.21904761904761f)) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), equalTo(1000)) + .body(pathToMetricResult(Collections.emptyList(), List.of("avg(took_ms)")), equalTo(78.74f)); + } + + @ContainerMatrixTest + void testSingleRowPivotWithDateFieldAsColumnPivot() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("http_method").build()) + .columnGroups( + Time.builder() + .field("timestamp") + .interval(TimeUnitInterval.Builder.builder().timeunit("10s").build()) + .build() + ) + .series(Average.builder().field("took_ms").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(5)); + + final List<List<String>> expectedKeys = List.of(List.of("GET"), List.of("DELETE"), List.of("POST"), List.of("PUT"), List.of()); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + + final List<List<String>> actualRowKeys = validatableResponse.extract().path(searchTypeResult + ".key"); + + assertThat(actualRowKeys).isEqualTo(expectedKeys); + + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of("GET"), List.of("2022-09-26T14:12:10.000Z", "avg(took_ms)")), equalTo(51.0f)) + .body(pathToMetricResult("DELETE", "avg(took_ms)"), equalTo(73.5576923076923f)) + .body(pathToMetricResult(List.of("DELETE"), List.of("2022-09-26T14:12:10.000Z", "avg(took_ms)")), is(nullValue())) + .body(pathToMetricResult("GET", "avg(took_ms)"), equalTo(63.14883720930233f)); + } + + @ContainerMatrixTest + void testSingleColumnPivot() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .columnGroups(Values.builder().field("http_method").build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(Collections.emptyList(), List.of("GET", "count()")), equalTo(860)) + .body(pathToMetricResult(Collections.emptyList(), List.of("DELETE", "count()")), equalTo(52)) + .body(pathToMetricResult(Collections.emptyList(), List.of("POST", "count()")), equalTo(45)) + .body(pathToMetricResult(Collections.emptyList(),
List.of("PUT", "count()")), equalTo(43)) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testDoesNotReturnRollupWhenDisabled() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .columnGroups(Values.builder().field("http_method").build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(Collections.emptyList(), List.of("GET", "count()")), equalTo(860)) + .body(pathToMetricResult(Collections.emptyList(), List.of("DELETE", "count()")), equalTo(52)) + .body(pathToMetricResult(Collections.emptyList(), List.of("POST", "count()")), equalTo(45)) + .body(pathToMetricResult(Collections.emptyList(), List.of("PUT", "count()")), equalTo(43)) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), is(nullValue())); + } + + @ContainerMatrixTest + void testSingleRowAndColumnPivots() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("http_method").build()) + .columnGroups(Values.builder().field("http_response_code").build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(5)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse + .rootPath(searchTypeResultPath + "." + pathToRow(List.of("GET"))) + .body(pathToValue(List.of("200", "count()")), equalTo(847)) + .body(pathToValue(List.of("500", "count()")), equalTo(11)) + .body(pathToValue(List.of("504", "count()")), equalTo(2)) + .body(pathToValue(List.of("count()")), equalTo(860)); + + validatableResponse + .rootPath(searchTypeResultPath + "." + pathToRow(List.of("DELETE"))) + .body(pathToValue(List.of("204", "count()")), equalTo(51)) + .body(pathToValue(List.of("500", "count()")), equalTo(1)) + .body(pathToValue(List.of("count()")), equalTo(52)); + + validatableResponse + .rootPath(searchTypeResultPath + "." + pathToRow(List.of("POST"))) + .body(pathToValue(List.of("201", "count()")), equalTo(43)) + .body(pathToValue(List.of("500", "count()")), equalTo(1)) + .body(pathToValue(List.of("504", "count()")), equalTo(1)) + .body(pathToValue(List.of("count()")), equalTo(45)); + + validatableResponse + .rootPath(searchTypeResultPath + "." + pathToRow(List.of("PUT"))) + .body(pathToValue(List.of("200", "count()")), equalTo(42)) + .body(pathToValue(List.of("504", "count()")), equalTo(1)) + .body(pathToValue(List.of("count()")), equalTo(43)); + + validatableResponse + .rootPath(searchTypeResultPath + "." 
+ pathToRow(Collections.emptySet())) + .body(pathToValue(List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testRowAndColumnPivotsWithMissingFields() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("missing_row_pivot").build()) + .columnGroups(Values.builder().field("missing_column_pivot").build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(2)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("(Empty Value)"), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testTwoNestedRowPivots() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups( + Values.builder().field("http_method").limit(15).build(), + Values.builder().field("http_response_code").limit(15).build() + ) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(11)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("GET", "200"), List.of("count()")), equalTo(847)) + .body(pathToMetricResult(List.of("GET", "500"), List.of("count()")), equalTo(11)) + .body(pathToMetricResult(List.of("GET", "504"), List.of("count()")), equalTo(2)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("DELETE", "204"), List.of("count()")), equalTo(51)) + .body(pathToMetricResult(List.of("DELETE", "500"), List.of("count()")), equalTo(1)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("POST", "201"), List.of("count()")), equalTo(43)) + .body(pathToMetricResult(List.of("POST", "500"), List.of("count()")), equalTo(1)) + .body(pathToMetricResult(List.of("POST", "504"), List.of("count()")), equalTo(1)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("PUT", "200"), List.of("count()")), equalTo(42)) + .body(pathToMetricResult(List.of("PUT", "504"), List.of("count()")), equalTo(1)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testTwoTupleRowPivots() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups( + Values.builder().fields(List.of("http_method", "http_response_code")).limit(15).build() + ) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(11)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("GET", "200"), List.of("count()")), equalTo(847)) + .body(pathToMetricResult(List.of("GET", "500"), List.of("count()")), equalTo(11)) + .body(pathToMetricResult(List.of("GET", "504"), List.of("count()")), equalTo(2)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("DELETE", "204"), List.of("count()")), equalTo(51)) + 
.body(pathToMetricResult(List.of("DELETE", "500"), List.of("count()")), equalTo(1)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("POST", "201"), List.of("count()")), equalTo(43)) + .body(pathToMetricResult(List.of("POST", "500"), List.of("count()")), equalTo(1)) + .body(pathToMetricResult(List.of("POST", "504"), List.of("count()")), equalTo(1)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(List.of("PUT", "200"), List.of("count()")), equalTo(42)) + .body(pathToMetricResult(List.of("PUT", "504"), List.of("count()")), equalTo(1)); + + validatableResponse + .rootPath(searchTypeResultPath) + .body(pathToMetricResult(Collections.emptyList(), List.of("count()")), equalTo(1000)); + } + + @ContainerMatrixTest + void testTwoNestedRowPivotsWithSorting() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups( + Values.builder().field("http_method").limit(15).build(), + Values.builder().field("http_response_code").limit(15).build() + ) + .sort(PivotSort.create("http_response_code", SortSpec.Direction.Ascending)) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(10)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + final List> rows = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.key"); + final List> metrics = validatableResponse + .extract() + .jsonPath() + .getList(searchTypeResultPath + "*.values*.value"); + + assertThat(rows).containsExactly( + List.of("DELETE", "204"), + List.of("DELETE", "500"), + List.of("GET", "200"), + List.of("GET", "500"), + List.of("GET", "504"), + List.of("POST", "201"), + List.of("POST", "500"), + List.of("POST", "504"), + List.of("PUT", "200"), + List.of("PUT", "504") + ); + assertThat(metrics).containsExactly( + List.of(51), + List.of(1), + List.of(847), + List.of(11), + List.of(2), + List.of(43), + List.of(1), + List.of(1), + List.of(42), + List.of(1) + ); + } + + @ContainerMatrixTest + void testTwoTupleRowPivotsWithSorting() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups( + Values.builder().fields(List.of("http_method", "http_response_code")).limit(15).build() + ) + .sort(PivotSort.create("http_response_code", SortSpec.Direction.Ascending)) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(10)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + final List> rows = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.key"); + + assertThat(rows).containsExactly( + List.of("GET", "200"), + List.of("PUT", "200"), + List.of("POST", "201"), + List.of("DELETE", "204"), + List.of("DELETE", "500"), + List.of("GET", "500"), + List.of("POST", "500"), + List.of("GET", "504"), + List.of("POST", "504"), + List.of("PUT", "504") + ); + } + + @ContainerMatrixTest + void testTwoTupleRowPivotsWithMetricsSorting() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups( + Values.builder().fields(List.of("action", "controller")).limit(15).build() + ) + .series(List.of(Max.builder().field("took_ms").build(), Min.builder().field("took_ms").build())) + .sort(SeriesSort.create("min(took_ms)", SortSpec.Direction.Ascending), 
SeriesSort.create("max(took_ms)", SortSpec.Direction.Descending)) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(5)); + + final List> rows = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.values*.value"); + + assertThat(rows).containsExactly( + List.of(5300.0f, 36.0f), + List.of(5000.0f, 36.0f), + List.of(174.0f, 36.0f), + List.of(138.0f, 36.0f), + List.of(147.0f, 37.0f) + ); + } + + @ContainerMatrixTest + void testTwoNestedRowPivotsWithMetricsSorting() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups( + Values.builder().field("action").limit(15).build(), + Values.builder().field("controller").limit(15).build() + ) + .series(List.of(Max.builder().field("took_ms").build(), Min.builder().field("took_ms").build())) + .sort(SeriesSort.create("min(took_ms)", SortSpec.Direction.Ascending), SeriesSort.create("max(took_ms)", SortSpec.Direction.Descending)) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(5)); + + final List> rows = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.values*.value"); + + assertThat(rows).containsExactly( + List.of(5300.0f, 36.0f), + List.of(147.0f, 37.0f), + List.of(5000.0f, 36.0f), + List.of(174.0f, 36.0f), + List.of(138.0f, 36.0f) + ); + } + + @ContainerMatrixTest + void testTopLevelSeries() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .series(List.of(Max.builder().field("took_ms").build(), Min.builder().field("took_ms").build())) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(1)); + + final List> rows = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.values*.value"); + + assertThat(rows).containsExactly(List.of(5300.0f, 36.0f)); + } + + @ContainerMatrixTest + void testTwoIdenticalSeries() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(List.of( + Max.builder().field("took_ms").build(), + Max.builder().field("took_ms").build() + )) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(1)); + + final List> rows = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.values*.value"); + + assertThat(rows).containsExactly(List.of(5300.0f, 5300.0f)); + } + + @ContainerMatrixTest + void testTwoIdenticalSeriesOneWithCustomId() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(List.of( + Max.builder().id("Maximum Response Time").field("took_ms").build(), + Max.builder().field("took_ms").build() + 
)) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)); + + final String searchTypeResultPath = PIVOT_PATH + ".rows"; + + validatableResponse.rootPath(PIVOT_PATH) + .body("total", equalTo(1000)) + .body("rows", hasSize(1)); + + final List>> rowKeys = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.values*.key"); + + assertThat(rowKeys).containsExactly(List.of( + Collections.singletonList("Maximum Response Time"), + Collections.singletonList("max(took_ms)") + )); + + final List> rowValues = validatableResponse + .extract() + .jsonPath().getList(searchTypeResultPath + "*.values*.value"); + + assertThat(rowValues).containsExactly(List.of(5300.0f, 5300.0f)); + } + + // Percentage Metric tests + @ContainerMatrixTest + void testSimplestPercentageMetricWithCount() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .rowGroups(Values.builder().field("http_method").build()) + .series(Percentage.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(4)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("GET", "percentage(,COUNT)"), equalTo(0.86f)) + .body(pathToMetricResult("DELETE", "percentage(,COUNT)"), equalTo(0.052f)) + .body(pathToMetricResult("POST", "percentage(,COUNT)"), equalTo(0.045f)) + .body(pathToMetricResult("PUT", "percentage(,COUNT)"), equalTo(0.043f)); + } + + @ContainerMatrixTest + void testPercentageMetricWithCountOnField() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("http_method").build()) + .series(Percentage.builder().strategy(Percentage.Strategy.COUNT).field("http_method").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(5)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("GET", "percentage(http_method,COUNT)"), equalTo(0.86f)) + .body(pathToMetricResult("DELETE", "percentage(http_method,COUNT)"), equalTo(0.052f)) + .body(pathToMetricResult("POST", "percentage(http_method,COUNT)"), equalTo(0.045f)) + .body(pathToMetricResult("PUT", "percentage(http_method,COUNT)"), equalTo(0.043f)); + } + + @ContainerMatrixTest + void testPercentageMetricWithCountOnFieldForColumnPivotOnly() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .columnGroups(Values.builder().field("http_method").build()) + .series(Percentage.builder().strategy(Percentage.Strategy.COUNT).field("http_method").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(1)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of(), List.of("GET", "percentage(http_method,COUNT)")), equalTo(0.86f)) + .body(pathToMetricResult(List.of(), List.of("DELETE", "percentage(http_method,COUNT)")), equalTo(0.052f)) + .body(pathToMetricResult(List.of(), List.of("POST", "percentage(http_method,COUNT)")), equalTo(0.045f)) + .body(pathToMetricResult(List.of(), List.of("PUT", "percentage(http_method,COUNT)")), equalTo(0.043f)); + } + + 
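For orientation, the assertions in these pivot tests locate individual result buckets with Groovy GPath expressions built by the pathToMetricResult/pathToRow/pathToValue helpers defined near the bottom of this class; a minimal sketch of the expansion (illustrative only, using only names visible in this file):

    // For the row keyed ["GET", "200"] and the metric "count()", the helpers produce:
    //   pathToRow(List.of("GET", "200"))   -> "find { it.key == ['GET', '200'] }"
    //   pathToValue(List.of("count()"))    -> "values.find { value -> value.key == ['count()'] }.value"
    //   pathToMetricResult(keys, metrics)  -> pathToRow(keys) + "." + pathToValue(metrics)
    // With the REST Assured rootPath set to PIVOT_PATH + ".rows", the combined expression
    // selects the count() value of the row whose key is ["GET", "200"].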
@ContainerMatrixTest + void testPercentageMetricWithSumOnField() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("http_method").build()) + .series(Percentage.builder().strategy(Percentage.Strategy.SUM).field("took_ms").build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + validatableResponse.rootPath(PIVOT_PATH) + .body("rows", hasSize(5)); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult("GET", "percentage(took_ms,SUM)"), equalTo(0.689713f)) + .body(pathToMetricResult("DELETE", "percentage(took_ms,SUM)"), equalTo(0.04857759715519431f)) + .body(pathToMetricResult("POST", "percentage(took_ms,SUM)"), equalTo(0.148501397002794f)) + .body(pathToMetricResult("PUT", "percentage(took_ms,SUM)"), equalTo(0.11320802641605283f)); + } + + @ContainerMatrixTest + void testBooleanFieldsAreReturnedAsTrueOrFalse() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .rowGroups(Values.builder().field("test_boolean").build(), Values.builder().field("user_id").build()) + .series(Count.builder().build()) + .build(); + + final ValidatableResponse validatableResponse = executePivot(pivot); + + final String searchTypeResult = PIVOT_PATH + ".rows"; + validatableResponse + .rootPath(searchTypeResult) + .body(pathToMetricResult(List.of("true", "6476752"), List.of("count()")), equalTo(1)) + .body(pathToMetricResult(List.of("false", "6469981"), List.of("count()")), equalTo(1)); + } + + private String listToGroovy(Collection<String> strings) { + final List<String> quotedStrings = strings.stream() + .map(string -> "'" + string + "'") + .collect(Collectors.toList()); + + final String quotedList = Joiner.on(", ").join(quotedStrings); + return "[" + quotedList + "]"; + } + + private String pathToMetricResult(String key, String metric) { + return pathToMetricResult(List.of(key), List.of(metric)); + } + + private String pathToValue(Collection<String> metric) { + return "values.find { value -> value.key == " + listToGroovy(metric) + " }.value"; + } + + private String pathToRow(Collection<String> keys) { + return "find { it.key == " + listToGroovy(keys) + " }"; + } + + private String pathToMetricResult(Collection<String> keys, Collection<String> metric) { + return pathToRow(keys) + "." + pathToValue(metric); + } + + private ValidatableResponse executePivot(Pivot pivot) { + return api.search().executePivot(pivot) + .body(".total", equalTo(1000)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/SearchWithAggregationsSupportingMissingBucketsIT.java b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/SearchWithAggregationsSupportingMissingBucketsIT.java new file mode 100644 index 000000000000..37855f75c455 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/plugins/views/aggregations/SearchWithAggregationsSupportingMissingBucketsIT.java @@ -0,0 +1,265 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. 
If not, see + * . + */ +package org.graylog.plugins.views.aggregations; + +import io.restassured.response.ValidatableResponse; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Values; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Average; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Count; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.graylog.plugins.views.search.aggregations.MissingBucketConstants.MISSING_BUCKET_NAME; +import static org.graylog.testing.containermatrix.SearchServer.ES7; +import static org.graylog.testing.containermatrix.SearchServer.OS1; +import static org.graylog.testing.containermatrix.SearchServer.OS2; +import static org.graylog.testing.containermatrix.SearchServer.OS2_LATEST; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.IsNot.not; + +@ContainerMatrixTestsConfiguration(searchVersions = {OS1, ES7, OS2, OS2_LATEST}) +public class SearchWithAggregationsSupportingMissingBucketsIT { + + @SuppressWarnings("unused") + //use this fixtureType:474877 in all fixtures to assure this test isolation from the others + private static final String FIXTURE_TYPE_FIELD_VALUE = "474877"; + + private final GraylogApis api; + + public SearchWithAggregationsSupportingMissingBucketsIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void setUp() { + this.api.backend().importElasticsearchFixture("messages-for-missing-aggregation-check.json", SearchWithAggregationsSupportingMissingBucketsIT.class); + } + + private ValidatableResponse execute(Pivot pivot) { + return api.search().executePivot(pivot, "fixtureType:" + FIXTURE_TYPE_FIELD_VALUE) + .body(".total", equalTo(5)); + } + + @ContainerMatrixTest + void testSingleFieldAggregationHasProperMissingBucket() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build(), Average.builder().field("age").build()) + .rowGroups(Values.builder().field("firstName").limit(8).build()) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + //General verification + validatableResponse + .body(".rows", hasSize(5)) + .body(".total", equalTo(5)) + .body(".rows.find{ it.key[0] == 'Joe' }", notNullValue()) + .body(".rows.find{ it.key[0] == 'Jane' }", notNullValue()) + .body(".rows.find{ it.key[0] == 'Bob' }", notNullValue()) + .body(".rows.find{ it.key[0] == '" + MISSING_BUCKET_NAME + "' }", notNullValue()) + .body(".rows.find{ it.key == [] }", notNullValue()); + + //Empty bucket verification (should precede the last/total one - index 3) + //The only message with "empty" first name in a fixture is {(...)"lastName": "Cooper","age": 60(...)} + validatableResponse.body(".rows[3].key", contains(MISSING_BUCKET_NAME)); + validatableResponse.body(".rows[3].values[0].key", contains("count()")); + validatableResponse.body(".rows[3].values[0].value", equalTo(1)); + 
validatableResponse.body(".rows[3].values[1].key", contains("avg(age)")); + validatableResponse.body(".rows[3].values[1].value", equalTo(60.0f)); + + //Top bucket verification + //There are 2 "Joes" in a fixture: {(...)"lastName": "Smith","age": 50(...)} and {(...)"lastName": "Biden","age": 80(...)} + validatableResponse.body(".rows[0].key", contains("Joe")); + validatableResponse.body(".rows[0].values[0].key", contains("count()")); + validatableResponse.body(".rows[0].values[0].value", equalTo(2)); + validatableResponse.body(".rows[0].values[1].key", contains("avg(age)")); + validatableResponse.body(".rows[0].values[1].value", equalTo(65.0f)); + + } + + @ContainerMatrixTest + void testSingleFieldAggregationHasNoMissingBucketWhenSkipEmptyValuesIsUsed() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build(), Average.builder().field("age").build()) + .rowGroups(Values.builder().field("firstName").limit(8).skipEmptyValues().build()) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + //General verification + validatableResponse + .body(".rows", hasSize(4)) + .body(".total", equalTo(5)) + .body(".rows.find{ it.key[0] == 'Joe' }", notNullValue()) + .body(".rows.find{ it.key[0] == 'Jane' }", notNullValue()) + .body(".rows.find{ it.key[0] == 'Bob' }", notNullValue()) + .body(".rows.find{ it.key[0] == '" + MISSING_BUCKET_NAME + "' }", nullValue()) + .body(".rows.find{ it.key == [] }", notNullValue()); + + //Empty bucket verification (should precede the last/total one - index 3) + //The only message with "empty" first name in a fixture is {(...)"lastName": "Cooper","age": 60(...)} + validatableResponse.body(".rows[3].key", not(contains(MISSING_BUCKET_NAME))); + + //Top bucket verification + //There are 2 "Joes" in a fixture: {(...)"lastName": "Smith","age": 50(...)} and {(...)"lastName": "Biden","age": 80(...)} + validatableResponse.body(".rows[0].key", contains("Joe")); + validatableResponse.body(".rows[0].values[0].key", contains("count()")); + validatableResponse.body(".rows[0].values[0].value", equalTo(2)); + validatableResponse.body(".rows[0].values[1].key", contains("avg(age)")); + validatableResponse.body(".rows[0].values[1].value", equalTo(65.0f)); + + } + + @ContainerMatrixTest + void testTwoTupledFieldAggregationHasProperMissingBucket() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build(), Average.builder().field("age").build()) + .rowGroups( + Values.builder().fields(List.of("firstName", "lastName")).limit(8).build() + ) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + //General verification + validatableResponse + .body(tupledItemPath(MISSING_BUCKET_NAME, "Cooper"), hasItems(List.of(1, 60.0f))) + .body(tupledItemPath("Bob", MISSING_BUCKET_NAME), hasItems(List.of(1, 60.0f))) + .body(tupledItemPath("Joe", "Smith"), hasItems(List.of(1, 50.0f))) + .body(tupledItemPath("Joe", "Biden"), hasItems(List.of(1, 80.0f))) + .body(tupledItemPath("Jane", "Smith"), hasItems(List.of(1, 40.0f))) + .body(".rows.find{ it.key == [] }.values.value", hasItems(5, 58.0f)) //totals + .body(".rows", hasSize(6)) + .body(".total", equalTo(5)); + } + + private String tupledItemPath(String... 
keys) { + var condition = IntStream.range(0, keys.length) + .mapToObj(idx -> "it.key[" + idx + "] == '" + keys[idx] + "'") + .collect(Collectors.joining(" && ")); + + return ".rows.findAll { " + condition + " }.values.value"; + } + + @ContainerMatrixTest + void testTwoTupledFieldAggregationHasNoMissingBucketWhenSkipEmptyValuesIsUsed() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build(), Average.builder().field("age").build()) + .rowGroups( + Values.builder().fields(List.of("firstName", "lastName")).limit(8).skipEmptyValues().build() + ) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + //General verification + validatableResponse + .body(".rows", hasSize(4)) + .body(".rows.findAll{ it.key[0] == 'Joe' }", hasSize(2)) // Joe-Biden, Joe-Smith + .body(".rows.findAll{ it.key[0] == 'Jane' }", hasSize(1)) // Jane-Smith + .body(".rows.findAll{ it.key[0] == '" + MISSING_BUCKET_NAME + "' }", hasSize(0)) + .body(".rows.find{ it.key == [] }", notNullValue()) //totals + .body(".total", equalTo(5)); + + //Empty buckets verification + //We have only one entry with missing first name {(...)"lastName": "Cooper","age": 60(...)}, so both empty buckets will have the same values + validatableResponse.body(".rows.find{ it.key == ['" + MISSING_BUCKET_NAME + "'] }.values.value", nullValue()); + } + + @ContainerMatrixTest + void testTwoNestedFieldAggregationHasProperMissingBucket() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build(), Average.builder().field("age").build()) + .rowGroups( + Values.builder().field("firstName").limit(8).build(), + Values.builder().field("lastName").limit(8).build() + ) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + //General verification + validatableResponse + .body(".rows", hasSize(6)) + .body(".rows.findAll{ it.key[0] == 'Joe' }", hasSize(2)) // Joe-Biden, Joe-Smith + .body(".rows.findAll{ it.key[0] == 'Jane' }", hasSize(1)) // Jane-Smith + .body(".rows.findAll{ it.key == ['Bob', '" + MISSING_BUCKET_NAME + "'] }", hasSize(1)) // Bob has no last name + .body(".rows.findAll{ it.key[0] == '" + MISSING_BUCKET_NAME + "' }", hasSize(1)) + .body(".rows.find{ it.key == [] }", notNullValue()) //totals + .body(".total", equalTo(5)); + + //Empty buckets verification + //We have only one entry with missing first name {(...)"lastName": "Cooper","age": 60(...)}, so both empty buckets will have the same values + validatableResponse.body(".rows.find{ it.key == ['" + MISSING_BUCKET_NAME + "', 'Cooper'] }.values.value", hasItems(1, 60.0f)); + } + + @ContainerMatrixTest + void testRowAndColumnPivotHasProperMissingBucket() { + final Pivot pivot = Pivot.builder() + .rollup(false) + .series(Count.builder().build(), Average.builder().field("age").build()) + .rowGroups(Values.builder().field("firstName").limit(1).build()) + .columnGroups(Values.builder().field("lastName").limit(1).build()) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + validatableResponse + .body(".rows.find{ it.key == ['" + MISSING_BUCKET_NAME + "'] }.values.value", hasItems(1, 60.0f)); + } + + @ContainerMatrixTest + void testMissingBucketIsNotPresentIfItHasZeroValues() { + final Pivot pivot = Pivot.builder() + .rollup(true) + .series(Count.builder().build()) + .rowGroups(Values.builder().field("age").limit(8).build()) + .build(); + final ValidatableResponse validatableResponse = execute(pivot); + + //General verification + validatableResponse + .body(".rows", 
hasSize(5)) + .body(".total", equalTo(5)) + .body(".rows.find{ it.key == ['60'] }", notNullValue()) + .body(".rows.find{ it.key == ['40'] }", notNullValue()) + .body(".rows.find{ it.key == ['50'] }", notNullValue()) + .body(".rows.find{ it.key == ['80'] }", notNullValue()) + .body(".rows.find{ it.key == [] }", notNullValue()); + + //Empty bucket is not there - in a fixture all the documents have "age" field + validatableResponse.body(".rows.find{ it.key == ['" + MISSING_BUCKET_NAME + "'] }", nullValue()); + + //Top bucket verification + //There are 2 guys of age 60 + validatableResponse.body(".rows[0].key", contains("60")); + validatableResponse.body(".rows[0].values[0].key", contains("count()")); + validatableResponse.body(".rows[0].values[0].value", equalTo(2)); + + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/ClientCertResourceIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/ClientCertResourceIT.java new file mode 100644 index 000000000000..ede7fe99d70d --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/ClientCertResourceIT.java @@ -0,0 +1,174 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.searchbackend.datanode; + +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.JsonPath; +import io.restassured.response.ValidatableResponse; +import jakarta.annotation.Nonnull; +import jakarta.ws.rs.core.Response; +import org.assertj.core.api.Assertions; +import org.bouncycastle.cert.X509CertificateHolder; +import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; +import org.bouncycastle.openssl.PEMKeyPair; +import org.bouncycastle.openssl.PEMParser; +import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter; +import org.graylog.security.certutil.CertConstants; +import org.graylog.security.certutil.csr.InMemoryKeystoreInformation; +import org.graylog.security.certutil.csr.KeystoreInformation; +import org.graylog.testing.completebackend.apis.GraylogApiResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.security.TruststoreCreator; + +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import java.io.IOException; +import java.io.StringReader; +import java.net.HttpURLConnection; +import java.net.URL; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.SecureRandom; +import java.security.cert.Certificate; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import 
java.time.Duration; +import java.time.Instant; +import java.time.LocalDate; +import java.time.ZoneId; +import java.util.Collections; + +@ContainerMatrixTestsConfiguration(searchVersions = SearchServer.DATANODE_DEV, + additionalConfigurationParameters = { + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_DATANODE_INSECURE_STARTUP", value = "false"), + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_SELFSIGNED_STARTUP", value = "true"), + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_ELASTICSEARCH_HOSTS", value = ""), + }) +public class ClientCertResourceIT { + + private final GraylogApis api; + + public ClientCertResourceIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void generateClientCert() throws Exception { + // these roles are supported: all_access,security_rest_api_access,readall + final ValidatableResponse clientCertResponse = api.post("/ca/clientcert", """ + { + "principal": "admin", + "role": "all_access", + "password": "asdfgh", + "certificate_lifetime": "P6M" + } + """, Response.Status.OK.getStatusCode()); + + final GraylogApiResponse parsedResponse = new GraylogApiResponse(clientCertResponse); + final X509Certificate caCertificate = decodeCert(parsedResponse.properJSONPath().read("ca_certificate")); + final PrivateKey privateKey = decodePrivateKey(parsedResponse.properJSONPath().read("private_key")); + final X509Certificate certificate = decodeCert(parsedResponse.properJSONPath().read("certificate")); + + Assertions.assertThat(certificate.getIssuerX500Principal().getName()).isEqualTo("CN=Graylog CA"); + Assertions.assertThat(certificate.getSubjectX500Principal().getName()).isEqualTo("CN=admin"); + LocalDate expires = certificate.getNotAfter() + .toInstant() + .atZone(ZoneId.systemDefault()) + .toLocalDate(); + LocalDate shouldExpire = Instant.now().plus(Duration.ofDays(180)).atZone(ZoneId.systemDefault()).toLocalDate(); + Assertions.assertThat(expires).isBetween(shouldExpire.minusDays(2), shouldExpire.plusDays(2)); + + final SSLContext sslContext = createSslContext( + createKeystore(privateKey, certificate, caCertificate), + createTruststore(caCertificate)); + + final URL url = new URL("https://" + this.api.backend().searchServerInstance().getHttpHostAddress()); + + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + if (connection instanceof HttpsURLConnection) { + ((HttpsURLConnection) connection).setSSLSocketFactory(sslContext.getSocketFactory()); + + Assertions.assertThat(connection.getResponseCode()).isEqualTo(200); + + final DocumentContext parsedOpensearchResponse = JsonPath.parse(connection.getInputStream()); + final String clusterName = parsedOpensearchResponse.read("cluster_name"); + Assertions.assertThat(clusterName).isEqualTo("datanode-cluster"); + } + } + + @Nonnull + private static SSLContext createSslContext(KeystoreInformation keystore, KeyStore truststore) throws Exception { + KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509"); + kmf.init(keystore.loadKeystore(), keystore.password()); + SSLContext sc = SSLContext.getInstance("TLS"); + + final TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(truststore); + + sc.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + return sc; + } + + private static KeyStore createTruststore(X509Certificate caCertificate) { + return 
TruststoreCreator.newEmpty().addCertificates(Collections.singletonList(caCertificate)).getTruststore(); + } + + private static KeystoreInformation createKeystore(PrivateKey privateKey, X509Certificate certificate, X509Certificate caCertificate) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException { + KeyStore keystore = KeyStore.getInstance(CertConstants.PKCS12); + keystore.load(null, null); + final char[] password = "keystorepassword".toCharArray(); + keystore.setKeyEntry("client", privateKey, password, new Certificate[]{certificate}); + + return new InMemoryKeystoreInformation(keystore, password); + } + + private static X509Certificate decodeCert(String pemEncodedCert) { + final PEMParser pemParser = new PEMParser(new StringReader(pemEncodedCert)); + try { + Object parsed = pemParser.readObject(); + if (parsed instanceof X509CertificateHolder certificate) { + return new JcaX509CertificateConverter().getCertificate(certificate); + } else { + throw new IllegalArgumentException("Couldn't parse x509 certificate from provided string, unknown type"); + } + } catch (IOException | CertificateException e) { + throw new RuntimeException(e); + } + } + + private static PrivateKey decodePrivateKey(String pemEncodedCert) { + final PEMParser pemParser = new PEMParser(new StringReader(pemEncodedCert)); + JcaPEMKeyConverter converter = new JcaPEMKeyConverter(); + try { + Object parsed = pemParser.readObject(); + if (parsed instanceof PEMKeyPair keyPair) { + return converter.getPrivateKey(keyPair.getPrivateKeyInfo()); + } else { + throw new IllegalArgumentException("Couldn't parse private key from provided string, unknown type"); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeOpensearchProxyDisabledAllowlistIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeOpensearchProxyDisabledAllowlistIT.java new file mode 100644 index 000000000000..cef705b5a067 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeOpensearchProxyDisabledAllowlistIT.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.searchbackend.datanode; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.BeforeEach; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = SearchServer.DATANODE_DEV, additionalConfigurationParameters = {@ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_DATANODE_PROXY_API_ALLOWLIST", value = "false")}) +public class DatanodeOpensearchProxyDisabledAllowlistIT { + private GraylogApis apis; + + @BeforeEach + void setUp(GraylogApis apis) { + this.apis = apis; + } + + @ContainerMatrixTest + void testProtectedPath() { + // this requests the /_search of the underlying opensearch. By default, it's disabled and should return HTTP 400 + // only if we disable the allowlist it should be accessible + apis.get("/datanodes/any/opensearch/_search", 200) + .assertThat().body("_shards.successful", Matchers.greaterThanOrEqualTo(1)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeOpensearchProxyIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeOpensearchProxyIT.java new file mode 100644 index 000000000000..3af073ed9372 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeOpensearchProxyIT.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.searchbackend.datanode; + +import com.github.rholder.retry.RetryException; +import io.restassured.response.ValidatableResponse; +import org.assertj.core.api.Assertions; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Users; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.BeforeEach; + +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutionException; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = SearchServer.DATANODE_DEV, additionalConfigurationParameters = {@ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_DATANODE_PROXY_API_ALLOWLIST", value = "true")}) +public class DatanodeOpensearchProxyIT { + + private GraylogApis apis; + + @BeforeEach + void setUp(GraylogApis apis) { + this.apis = apis; + } + + @ContainerMatrixTest + void testProxyPlaintextGet() throws ExecutionException, RetryException { + final ValidatableResponse response = apis.get("/datanodes/any/opensearch/_cat/indices", 200); + final String responseBody = response.extract().body().asString(); + Assertions.assertThat(responseBody).contains("graylog_0").contains("gl-system-events_0"); + } + + @ContainerMatrixTest + void testProxyJsonGet() { + final ValidatableResponse response = apis.get("/datanodes/any/opensearch/_mapping", 200); + response.assertThat().body("graylog_0.mappings.properties.gl2_accounted_message_size.type", Matchers.equalTo("long")); + } + + @ContainerMatrixTest + void testForbiddenUrl() { + final String message = apis.get("/datanodes/any/opensearch/_search", 400).extract().body().asString(); + Assertions.assertThat(message).contains("This request is not allowed"); + } + + + @ContainerMatrixTest + void testNonAdminUser() { + //HTTP 401/unauthorized for any non-admin user + apis.get("/datanodes/any/opensearch/_search", Users.JOHN_DOE, Collections.emptyMap(),401); + } + + @ContainerMatrixTest + void testTargetSpecificDatanodeInstance() { + final List datanodes = apis.system().datanodes().properJSONPath().read("elements.*.hostname"); + Assertions.assertThat(datanodes).isNotEmpty(); + + final String hostname = datanodes.iterator().next(); + apis.get("/datanodes/" + hostname + "/opensearch/_mapping", 200).assertThat().body("graylog_0.mappings.properties.gl2_accounted_message_size.type", Matchers.equalTo("long")); + } + + @ContainerMatrixTest + void testQueryParameters() { + final ValidatableResponse response = apis.get("/datanodes/any/opensearch/_cluster/settings?include_defaults=true", 200); + response.assertThat().body("defaults.cluster.name", Matchers.equalTo("datanode-cluster")); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeProvisioningIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeProvisioningIT.java new file mode 100644 index 000000000000..edaf28887045 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeProvisioningIT.java @@ -0,0 +1,307 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.searchbackend.datanode; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.github.joschi.jadconfig.util.Duration; +import com.github.rholder.retry.Attempt; +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.RetryListener; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import io.restassured.common.mapper.TypeRef; +import io.restassured.http.ContentType; +import io.restassured.response.ValidatableResponse; +import jakarta.annotation.Nonnull; +import jakarta.ws.rs.core.MediaType; +import org.apache.commons.lang.RandomStringUtils; +import org.apache.http.HttpStatus; +import org.assertj.core.api.Assertions; +import org.graylog.security.certutil.CertConstants; +import org.graylog.security.certutil.CertutilCa; +import org.graylog.security.certutil.console.TestableConsole; +import org.graylog.testing.completebackend.ContainerizedGraylogBackend; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.restoperations.DatanodeOpensearchWait; +import org.graylog.testing.restoperations.RestOperationParameters; +import org.graylog2.cluster.nodes.DataNodeStatus; +import org.graylog2.cluster.preflight.DataNodeProvisioningConfig; +import org.graylog2.security.IndexerJwtAuthTokenProvider; +import org.graylog2.security.JwtSecret; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.security.auth.x500.X500Principal; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.file.Path; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.Certificate; +import java.security.cert.CertificateException; +import java.security.cert.CertificateFactory; +import java.security.cert.X509Certificate; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.not; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = SearchServer.DATANODE_DEV, + additionalConfigurationParameters = { + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_DATANODE_INSECURE_STARTUP", value = "false"), + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = 
"GRAYLOG_ELASTICSEARCH_HOSTS", value = ""), + }) +public class DatanodeProvisioningIT { + + private final Logger log = LoggerFactory.getLogger(DatanodeProvisioningIT.class); + + private final GraylogApis apis; + + @TempDir + private Path tempDir; + private BasicAuthCredentials basicAuth; + + public DatanodeProvisioningIT(GraylogApis apis) { + this.apis = apis; + } + + @BeforeEach + void setUp() { + basicAuth = extractBasicAuthFromLogs(apis.backend().getLogs()); + } + + @AfterEach + void tearDown() { + resetPreflight(); + } + + @ContainerMatrixTest + void provisionDatanodeGenerateCA() throws ExecutionException, RetryException, KeyStoreException, CertificateException, IOException, NoSuchAlgorithmException { + final String caSubjectName = createSelfSignedCA(); + + configureAutomaticCertRenewalPolicy(); + triggerDatanodeProvisioning(); + // Wait till all the datanodes become CONNECTED (=stated with certificates) + final List connectedDatanodes = waitForDatanodesConnected(basicAuth); + // verify that we have one connected datanode + Assertions.assertThat(connectedDatanodes) + .hasSize(1); + + final KeyStore truststore = keystoreFromApiCertificate(); + verifySubjectName(truststore, caSubjectName); + + testEncryptedConnectionToOpensearch(truststore); + } + + private static void verifySubjectName(KeyStore truststore, String caSubjectName) throws KeyStoreException { + final X500Principal subject = ((X509Certificate) truststore.getCertificate("ca")).getSubjectX500Principal(); + Assertions.assertThat(subject.getName()).isEqualTo("CN=" + caSubjectName); + } + + private void testEncryptedConnectionToOpensearch(KeyStore truststore) throws ExecutionException, RetryException, KeyStoreException, CertificateException, IOException, NoSuchAlgorithmException { + try { + new DatanodeOpensearchWait(RestOperationParameters.builder() + .port(getOpensearchPort()) + .truststore(truststore) + .jwtTokenProvider(new IndexerJwtAuthTokenProvider(new JwtSecret(ContainerizedGraylogBackend.PASSWORD_SECRET), Duration.seconds(120), Duration.seconds(60))) + .build()) + .waitForNodesCount(1); + } catch (Exception e) { + log.error("Could not connect to Opensearch\n" + apis.backend().getSearchLogs()); + throw e; + } + } + + private List waitForDatanodesConnected(BasicAuthCredentials basicAuth) throws ExecutionException, RetryException { + List connectedDatanodes = null; + try { + connectedDatanodes = RetryerBuilder.>newBuilder() + .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS)) + .withStopStrategy(StopStrategies.stopAfterAttempt(60)) + .withRetryListener(new RetryListener() { + @Override + public void onRetry(Attempt attempt) { + if (attempt.hasResult()) { + log.info(String.valueOf(attempt.getResult())); + } + } + }) + .retryIfResult(list -> list.isEmpty() || !list.stream().allMatch(node -> + node.status().equals(DataNodeProvisioningConfig.State.CONNECTED.name()) && + node.dataNodeStatus().equals(DataNodeStatus.AVAILABLE.name()) + )) + .build() + .call(this::getDatanodes); + } catch (ExecutionException | RetryException | IllegalStateException e) { + log.error("Datanode not started:\n" + apis.backend().getSearchLogs()); + throw e; + } + return connectedDatanodes; + } + + private ValidatableResponse triggerDatanodeProvisioning() { + return given() + .spec(apis.requestSpecification()) + .body("") + .auth().basic(basicAuth.username, basicAuth.password) + .post("/generate") + .then() + .statusCode(HttpStatus.SC_NO_CONTENT); + } + + private String createSelfSignedCA() { + String subject = "Graylog CA generated " + 
RandomStringUtils.randomAlphanumeric(10); + given() + .spec(apis.requestSpecification()) + .body("{\"organization\":\"" + subject + "\"}") + .auth().basic(basicAuth.username, basicAuth.password) + .post("/ca/create") + .then() + .statusCode(HttpStatus.SC_CREATED); + return subject; + } + + private void resetPreflight() { + given() + .spec(apis.requestSpecification()) + .auth().basic(basicAuth.username, basicAuth.password) + .delete("/startOver") + .then() + .log().ifStatusCodeMatches(Matchers.not(not(HttpStatus.SC_NO_CONTENT))) + .statusCode(HttpStatus.SC_NO_CONTENT); + } + + private ValidatableResponse configureAutomaticCertRenewalPolicy() { + return given() + .spec(apis.requestSpecification()) + .body("{\"mode\":\"Automatic\",\"certificate_lifetime\":\"P30D\"}") + .auth().basic(basicAuth.username, basicAuth.password) + .post("/renewal_policy") + .then() + .statusCode(HttpStatus.SC_NO_CONTENT); + } + + @ContainerMatrixTest + void provisionDatanodeUploadCA() throws ExecutionException, RetryException, CertificateException, KeyStoreException, IOException, NoSuchAlgorithmException { + final Path caKeystore = createCA(); + + uploadCA(caKeystore); + configureAutomaticCertRenewalPolicy(); + triggerDatanodeProvisioning(); + // Wait till all the datanodes become CONNECTED (=started with certificates) + final List<DatanodeStatus> connectedDatanodes = waitForDatanodesConnected(basicAuth); + // verify that we have one connected datanode + Assertions.assertThat(connectedDatanodes) + .hasSize(1); + + final KeyStore truststore = keystoreFromApiCertificate(); + verifySubjectName(truststore, CertutilCa.DEFAULT_ORGANIZATION_NAME); + + testEncryptedConnectionToOpensearch(truststore); + } + + private ValidatableResponse uploadCA(Path caKeystore) { + return given() + .spec(apis.requestSpecification()) + .auth().basic(basicAuth.username, basicAuth.password) + .contentType(MediaType.MULTIPART_FORM_DATA) + .multiPart("files", caKeystore.toFile()) + .multiPart("password", "my-secret-password") + .post("/ca/upload") + .then() + .statusCode(HttpStatus.SC_OK); + } + + private Path createCA() { + final Path certPath = tempDir.resolve("test-ca.p12"); + + final TestableConsole input = TestableConsole.empty() + .register(CertutilCa.PROMPT_ENTER_CA_PASSWORD, "my-secret-password"); + + final CertutilCa command = new CertutilCa(certPath.toAbsolutePath().toString(), input); + command.run(); + return certPath; + } + + @Nonnull + private KeyStore keystoreFromApiCertificate() throws KeyStoreException, CertificateException, IOException, NoSuchAlgorithmException { + final byte[] certificate = given() + .spec(apis.requestSpecification()) + .auth().basic(basicAuth.username, basicAuth.password) + .accept(ContentType.TEXT) + .get("/ca/certificate") + .then().extract().body().asByteArray(); + final KeyStore keystore = KeyStore.getInstance(CertConstants.PKCS12); + keystore.load(null, null); + CertificateFactory cf = CertificateFactory.getInstance("X.509"); + Certificate cert = cf.generateCertificate(new ByteArrayInputStream(certificate)); + keystore.setCertificateEntry("ca", cert); + return keystore; + } + + private int getOpensearchPort() { + final String indexerHostAddress = apis.backend().searchServerInstance().getHttpHostAddress(); + return Integer.parseInt(indexerHostAddress.split(":")[1]); + } + + private List<DatanodeStatus> getDatanodes() { + return given() + .spec(apis.requestSpecification()) + .auth().basic(basicAuth.username, basicAuth.password) + .get("/data_nodes") + .then() + .extract().body().as(new TypeRef<List<DatanodeStatus>>() {}); + + } + + private 
BasicAuthCredentials extractBasicAuthFromLogs(String logs) { + final Pattern pattern = Pattern.compile("Initial configuration is accessible at .+ with username '(.+)' and password '(.+)'", Pattern.MULTILINE); + final Matcher matcher = pattern.matcher(logs); + if (matcher.find()) { + return new BasicAuthCredentials(matcher.group(1), matcher.group(2)); + } else { + throw new IllegalStateException("Couldn't find preflight auth credentials in logs: " + logs); + } + } + + private record BasicAuthCredentials(String username, String password) {} + + private record DatanodeStatus( + @JsonProperty("node_id") String nodeId, + @JsonProperty("transport_address") String transportAddress, + @JsonProperty("status") String status, + @JsonProperty("error_msg") String errorMsg, + @JsonProperty("hostname") String hostname, + @JsonProperty("short_node_id") String shortNodeId, + @JsonProperty("data_node_status") String dataNodeStatus + ) { + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeRestProxyIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeRestProxyIT.java new file mode 100644 index 000000000000..358dad5a4da3 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeRestProxyIT.java @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.searchbackend.datanode; + +import io.restassured.response.ValidatableResponse; +import org.assertj.core.api.Assertions; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.BeforeEach; + +import java.util.List; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = SearchServer.DATANODE_DEV, additionalConfigurationParameters = {@ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_DATANODE_PROXY_API_ALLOWLIST", value = "false")}) +public class DatanodeRestProxyIT { + + private GraylogApis apis; + + @BeforeEach + void setUp(GraylogApis apis) { + this.apis = apis; + } + + @ContainerMatrixTest + void testTargetAllDatanodeInstance() { + final List datanodes = apis.system().datanodes().properJSONPath().read("elements.*.hostname"); + + final ValidatableResponse res = apis.get("/datanodes/all/rest/", 200); + + for (String hostname : datanodes) { + final String jsonpath = hostname + ".opensearch.node.rest_base_url"; + final String expectedRestUrl = "http://" + hostname + ":9200"; + res.assertThat().body(jsonpath, Matchers.equalTo(expectedRestUrl)); + } + } + + @ContainerMatrixTest + void testTargetAnyDatanodeInstance() { + apis.get("/datanodes/any/rest/", 200) + .assertThat().body("opensearch.node.node_name", Matchers.equalTo("indexer")); + } + + @ContainerMatrixTest + void testTargetSpecificDatanodeInstance() { + final List datanodes = apis.system().datanodes().properJSONPath().read("elements.*.hostname"); + Assertions.assertThat(datanodes).isNotEmpty(); + + final String hostname = datanodes.iterator().next(); + apis.get("/datanodes/" + hostname + "/rest/", 200) + .assertThat().body("opensearch.node.node_name", Matchers.equalTo("indexer")); + + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeSelfsignedStartupIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeSelfsignedStartupIT.java new file mode 100644 index 000000000000..602565eed7b4 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/DatanodeSelfsignedStartupIT.java @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.searchbackend.datanode; + +import com.github.joschi.jadconfig.util.Duration; +import com.github.rholder.retry.RetryException; +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.ContainerizedGraylogBackend; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.restoperations.DatanodeOpensearchWait; +import org.graylog.testing.restoperations.RestOperationParameters; +import org.graylog2.security.IndexerJwtAuthTokenProvider; +import org.graylog2.security.JwtSecret; +import org.hamcrest.Matchers; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ExecutionException; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, searchVersions = SearchServer.DATANODE_DEV, + additionalConfigurationParameters = { + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_DATANODE_INSECURE_STARTUP", value = "false"), + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_SELFSIGNED_STARTUP", value = "true"), + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_ELASTICSEARCH_HOSTS", value = ""), + }) +public class DatanodeSelfsignedStartupIT { + + + private final Logger log = LoggerFactory.getLogger(DatanodeProvisioningIT.class); + + private final GraylogApis apis; + + public DatanodeSelfsignedStartupIT(GraylogApis apis) { + this.apis = apis; + } + + @ContainerMatrixTest + public void testSelfsignedStartup() throws ExecutionException, RetryException { + testEncryptedConnectionToOpensearch(); + } + + + private int getOpensearchPort() { + final String indexerHostAddress = apis.backend().searchServerInstance().getHttpHostAddress(); + return Integer.parseInt(indexerHostAddress.split(":")[1]); + } + + private void testEncryptedConnectionToOpensearch() throws ExecutionException, RetryException { + try { + final ValidatableResponse response = new DatanodeOpensearchWait(RestOperationParameters.builder() + .port(getOpensearchPort()) + .relaxedHTTPSValidation(true) + .jwtTokenProvider(new IndexerJwtAuthTokenProvider(new JwtSecret(ContainerizedGraylogBackend.PASSWORD_SECRET), Duration.seconds(120), Duration.seconds(60))) + .build()) + .waitForNodesCount(1); + + response.assertThat().body("status", Matchers.equalTo("green")); + } catch (Exception e) { + log.error("Could not connect to Opensearch\n" + apis.backend().getSearchLogs()); + throw e; + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/RemoteReindexingMigrationIT.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/RemoteReindexingMigrationIT.java new file mode 100644 index 000000000000..c3c54a7249b7 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/datanode/RemoteReindexingMigrationIT.java @@ -0,0 +1,197 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.searchbackend.datanode; + +import com.github.rholder.retry.Attempt; +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.RetryListener; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import io.restassured.response.ValidatableResponse; +import org.apache.commons.lang.RandomStringUtils; +import org.assertj.core.api.Assertions; +import org.graylog.shaded.opensearch2.org.opensearch.action.index.IndexRequest; +import org.graylog.shaded.opensearch2.org.opensearch.action.index.IndexResponse; +import org.graylog.shaded.opensearch2.org.opensearch.client.indices.CloseIndexRequest; +import org.graylog.shaded.opensearch2.org.opensearch.client.indices.CreateIndexRequest; +import org.graylog.shaded.opensearch2.org.opensearch.client.indices.CreateIndexResponse; +import org.graylog.storage.opensearch2.testing.OpenSearchInstance; +import org.graylog.storage.opensearch2.testing.OpenSearchInstanceBuilder; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.elasticsearch.IndexState; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +@ContainerMatrixTestsConfiguration(searchVersions = SearchServer.DATANODE_DEV) +public class RemoteReindexingMigrationIT { + + private static final Logger LOG = LoggerFactory.getLogger(RemoteReindexingMigrationIT.class); + + private GraylogApis apis; + private OpenSearchInstance openSearchInstance; + + @BeforeEach + void setUp(GraylogApis apis) { + this.apis = apis; + openSearchInstance = OpenSearchInstanceBuilder.builder() + .network(apis.backend().network()) + .hostname("existing-opensearch-cluster") + .build(); + } + + @AfterEach + void tearDown() { + this.openSearchInstance.close(); + } + + + @ContainerMatrixTest + void testRemoteAsyncReindexing() throws ExecutionException, RetryException { + + final String indexName = createRandomSourceIndex(); + final String indexName2 = createRandomSourceIndex(); + + final String messageContent = ingestRandomMessage(indexName); + final String messageContent2 = ingestRandomMessage(indexName2); + + closeSourceIndex(indexName); + + createTargetIndex(indexName, true); + createTargetIndex(indexName2, false); + blockTargetIndex(indexName2); + + Assertions.assertThat(getTargetIndexState(indexName)) + .isEqualTo(IndexState.CLOSE); + + // flush the newly created document + openSearchInstance.client().refreshNode(); + + final String request = """ + { + "allowlist": "%s", + "hostname": "%s", + "indices": ["%s", "%s"], + "synchronous": false + } + """.formatted(openSearchInstance.internalUri(), 
openSearchInstance.internalUri(), indexName, indexName2); + + + final ValidatableResponse migrationResponse = apis.post("/remote-reindex-migration/remoteReindex", request, 200); + final String migrationID = migrationResponse.extract().body().asString(); + + ValidatableResponse response = waitForMigrationFinished(migrationID); + + final String status = response.extract().body().jsonPath().get("status"); + Assertions.assertThat(status).isEqualTo("FINISHED"); + + Assertions.assertThat(getTargetIndexState(indexName)) + .isEqualTo(IndexState.CLOSE); + + + openTargetIndex(indexName); + + Assertions.assertThat(waitForMessage(indexName, messageContent)).containsEntry("message", messageContent); + Assertions.assertThat(waitForMessage(indexName2, messageContent2)).containsEntry("message", messageContent2); + + + } + + private void blockTargetIndex(String indexName) { + apis.backend().searchServerInstance().client().setIndexBlock(indexName); + } + + private void openTargetIndex(String indexName) { + apis.backend().searchServerInstance().client().openIndex(indexName); + } + + private IndexState getTargetIndexState(String indexName) { + return apis.backend().searchServerInstance().client().getStatus(indexName); + } + + private void createTargetIndex(String indexName, boolean closed) { + apis.backend().searchServerInstance().client().createIndex(indexName); + if (closed) { + apis.backend().searchServerInstance().client().closeIndex(indexName); + } + } + + private void closeSourceIndex(String indexName) { + openSearchInstance.openSearchClient().execute((restHighLevelClient, requestOptions) -> restHighLevelClient.indices().close(new CloseIndexRequest(indexName), requestOptions)); + } + + /** + * @return name of the newly created index + */ + private String createRandomSourceIndex() { + String indexName = RandomStringUtils.randomAlphanumeric(15).toLowerCase(Locale.ROOT); + final CreateIndexResponse response = openSearchInstance.openSearchClient().execute((restHighLevelClient, requestOptions) -> restHighLevelClient.indices().create(new CreateIndexRequest(indexName), requestOptions)); + return response.index(); + } + + /** + * @return content of the created message. Useful for later verification that the message has been successfully + * transferred from old to new cluster. 
+     */
+    private String ingestRandomMessage(String indexName) {
+        String messageContent = RandomStringUtils.randomAlphanumeric(20);
+        final IndexResponse response = openSearchInstance.openSearchClient().execute((restHighLevelClient, requestOptions) -> {
+            final IndexRequest req = new IndexRequest();
+            req.index(indexName);
+            req.source(Map.of("message", messageContent));
+            return restHighLevelClient.index(req, requestOptions);
+        });
+        return messageContent;
+    }
+
+    private ValidatableResponse waitForMigrationFinished(String migrationID) throws ExecutionException, RetryException {
+        return RetryerBuilder.<ValidatableResponse>newBuilder()
+                .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS))
+                .withStopStrategy(StopStrategies.stopAfterAttempt(120))
+                .retryIfResult(r -> !r.extract().body().jsonPath().get("status").equals("FINISHED"))
+                .withRetryListener(new RetryListener() {
+                    @Override
+                    public <V> void onRetry(Attempt<V> attempt) {
+                        if (attempt.hasResult()) {
+                            final String status = ((ValidatableResponse) attempt.getResult()).extract().body().asString();
+                            LOG.info("Current reindex status: " + status);
+                        }
+                    }
+                }).build()
+                .call(() -> apis.get("/remote-reindex-migration/status/" + migrationID, 200));
+    }
+
+    private Map<String, Object> waitForMessage(String indexName, String messageContent) throws ExecutionException, RetryException {
+        return RetryerBuilder.<Optional<Map<String, Object>>>newBuilder()
+                .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS))
+                .withStopStrategy(StopStrategies.stopAfterAttempt(120))
+                .retryIfResult(Optional::isEmpty)
+                .build().call(() -> apis.backend().searchServerInstance().client().findMessage(indexName, "message:" + messageContent))
+                .orElseThrow(() -> new IllegalStateException("Message should be present!"));
+    }
+}
diff --git a/full-backend-tests/src/test/java/org/graylog/searchbackend/elasticsearch/e2e/ElasticsearchE2E.java b/full-backend-tests/src/test/java/org/graylog/searchbackend/elasticsearch/e2e/ElasticsearchE2E.java
new file mode 100644
index 000000000000..6f8228615637
--- /dev/null
+++ b/full-backend-tests/src/test/java/org/graylog/searchbackend/elasticsearch/e2e/ElasticsearchE2E.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */ +package org.graylog.searchbackend.elasticsearch.e2e; + +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.utils.GelfInputUtils; +import org.graylog.testing.utils.SearchUtils; + +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog.testing.graylognode.NodeContainerConfig.GELF_HTTP_PORT; + +@ContainerMatrixTestsConfiguration +public class ElasticsearchE2E { + private final GraylogApis api; + + public ElasticsearchE2E(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void inputMessageCanBeSearched() { + int mappedPort = this.api.backend().mappedPortFor(GELF_HTTP_PORT); + + GelfInputUtils.createGelfHttpInput(mappedPort, GELF_HTTP_PORT, api.requestSpecificationSupplier()); + + GelfInputUtils.postMessage(mappedPort, + "{\"short_message\":\"kram\", \"host\":\"example.org\", \"facility\":\"test\"}", + api.requestSpecificationSupplier()); + + List messages = SearchUtils.searchForAllMessages(this.api.requestSpecificationSupplier()); + assertThat(messages).doesNotContain("Hello there"); + + GelfInputUtils.postMessage(mappedPort, + "{\"short_message\":\"Hello there\", \"host\":\"example.org\", \"facility\":\"test\"}", + api.requestSpecificationSupplier()); + + assertThat(SearchUtils.waitForMessage(this.api.requestSpecificationSupplier(), "Hello there")).isTrue(); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/shared/system/stats/SystemStatsIT.java b/full-backend-tests/src/test/java/org/graylog/shared/system/stats/SystemStatsIT.java new file mode 100644 index 000000000000..42a1174e85c6 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/shared/system/stats/SystemStatsIT.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.shared.system.stats; + +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.OS; + +import java.util.HashMap; +import java.util.Map; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; + +@DisabledIfEnvironmentVariable(named = "GITHUB_WORKSPACE", matches = ".+") +@ContainerMatrixTestsConfiguration +public class SystemStatsIT { + private final GraylogApis api; + + public SystemStatsIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + @DisabledOnOs(OS.MAC) + void filesystemStats() { + final Map filesystems = given() + .spec(api.requestSpecification()) + .when() + .get("/system/stats") + .then() + .statusCode(200) + .extract().jsonPath().getMap("fs.filesystems"); + + assertThat(filesystems).isNotEmpty(); + assertThat(filesystems.get("/usr/share/graylog/data/journal")).satisfies(entry -> + assertThat(((HashMap) entry).get("mount")).isEqualTo("/")); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/backenddriver/SearchDriver.java b/full-backend-tests/src/test/java/org/graylog/testing/backenddriver/SearchDriver.java new file mode 100644 index 000000000000..897621c98a08 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/backenddriver/SearchDriver.java @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.backenddriver; + +import com.google.common.collect.ImmutableSet; +import io.restassured.path.json.JsonPath; +import io.restassured.specification.RequestSpecification; +import org.bson.types.ObjectId; +import org.graylog.plugins.views.search.elasticsearch.ElasticsearchQueryString; +import org.graylog.plugins.views.search.rest.MappedFieldTypeDTO; +import org.graylog.plugins.views.search.rest.QueryDTO; +import org.graylog.plugins.views.search.rest.SearchDTO; +import org.graylog.plugins.views.search.searchtypes.MessageList; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.utils.JsonUtils; +import org.graylog.testing.utils.RangeUtils; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Supplier; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.notNullValue; + +/** + * WIP. This class illustrates how we could reuse common functionality for integration tests. 
+ * It might make sense to configure it with @link io.restassured.specification.RequestSpecification + * and whatever else it might need in the future and make it injectable into the test class via + * + * @link org.graylog.testing.completebackend.GraylogBackendExtension + * We should do that later, if we find that implementing more functionality here is useful and feasible. + */ +public class SearchDriver { + + private static final Logger LOG = LoggerFactory.getLogger(SearchDriver.class); + + /** + * @param Supplier to get the RequestSpecification from + * @return all messages' "message" field as List + */ + public static List searchAllMessages(Supplier spec) { + return searchAllMessagesInTimeRange(spec, RangeUtils.allMessagesTimeRange()); + } + + public static List searchAllMessagesInTimeRange(Supplier spec, TimeRange timeRange) { + String queryId = "query-id"; + String messageListId = "message-list-id"; + + String body = allMessagesJson(queryId, messageListId, timeRange); + + final JsonPath response = given() + .spec(spec.get()) + .when() + .body(body) + .post("/views/search/sync") + .then() + .statusCode(200) + .assertThat().body("execution.completed_exceptionally", notNullValue()) + .extract().body().jsonPath(); + + if (response.get("execution.completed_exceptionally")) { + final Object errors = response.getString("errors"); + LOG.warn("Failed to obtain messages: {}", errors); + } + + return response.getList(allMessagesJsonPath(queryId, messageListId), String.class); + } + + private static String allMessagesJson(String queryId, String messageListId, TimeRange timeRange) { + MessageList messageList = MessageList.builder().id(messageListId).build(); + QueryDTO q = QueryDTO.builder() + .id(queryId) + .query(ElasticsearchQueryString.of("")) + .timerange(timeRange) + .searchTypes(ImmutableSet.of(messageList)) + .build(); + SearchDTO s = SearchDTO.builder() + .id(new ObjectId().toHexString()) + .queries(q) + .build(); + + return JsonUtils.toJsonString(s); + } + + @SuppressWarnings("SameParameterValue") + private static String allMessagesJsonPath(String queryId, String messageListId) { + return "results." + queryId + ".search_types." + messageListId + ".messages.message.message"; + } + + public static List getFieldTypes(Supplier spec) { + final MappedFieldTypeDTO[] as = given() + .spec(spec.get()) + .get("/views/fields") + .as(MappedFieldTypeDTO[].class); + return Arrays.asList(as); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/AutomaticLeaderElectionStartupIT.java b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/AutomaticLeaderElectionStartupIT.java new file mode 100644 index 000000000000..b76f0a41414c --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/AutomaticLeaderElectionStartupIT.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.testing.fullbackend; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import static io.restassured.RestAssured.given; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, additionalConfigurationParameters = { + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_LEADER_ELECTION_MODE", value = "automatic") +}) +class AutomaticLeaderElectionStartupIT { + private final GraylogApis api; + + public AutomaticLeaderElectionStartupIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void canReachApi() { + given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .when() + .get() + .then() + .statusCode(200); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/BackendStartupIT.java b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/BackendStartupIT.java new file mode 100644 index 000000000000..d698873b831b --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/BackendStartupIT.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.testing.fullbackend; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.MailServerInstance; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.utils.SearchUtils; +import org.hamcrest.Matchers; + +import java.util.List; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, withMailServerEnabled = true) +class BackendStartupIT { + private final GraylogApis api; + + public BackendStartupIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void canReachApi() { + given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .when() + .get() + .then() + .statusCode(200); + } + + @ContainerMatrixTest + void loadsDefaultPlugins() { + List pluginNames = + given() + .spec(api.requestSpecification()) + .when() + .get("/system/plugins") + .then() + .statusCode(200) + .extract().jsonPath() + .getList("plugins.name"); + + assertThat(pluginNames).containsAnyOf( + "Elasticsearch 6 Support", + "Elasticsearch 7 Support", + "Threat Intelligence Plugin" + ); + } + + @ContainerMatrixTest + void importsElasticsearchFixtures() { + this.api.backend().importElasticsearchFixture("one-message.json", getClass()); + assertThat(SearchUtils.waitForMessage(this.api.requestSpecificationSupplier(), "hello from es fixture")).isTrue(); + } + + @ContainerMatrixTest + void startsMailServer() { + final MailServerInstance mailServer = this.api.backend().getEmailServerInstance().orElseThrow(() -> new IllegalStateException("Mail server should be accessible")); + given() + .get(mailServer.getEndpointURI() + "/api/v2/messages") + .then() + .assertThat().body("count", Matchers.equalTo(0)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/FiltersIT.java b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/FiltersIT.java new file mode 100644 index 000000000000..98efa19ebe95 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/FiltersIT.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.testing.fullbackend; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.shared.rest.resources.csp.CSP; +import org.graylog2.shared.rest.resources.csp.CSPResources; +import org.graylog2.shared.rest.resources.csp.CSPResponseFilter; +import org.hamcrest.Matchers; + +import java.util.regex.Pattern; + +import static io.restassured.RestAssured.given; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, withMailServerEnabled = true) +public class FiltersIT { + private static final String DEFAULT_CONNECT_SRC = "connect-src 'self' https://graylog.org/post/tag/ https://telemetry.graylog.cloud;"; + private final GraylogApis api; + private final CSPResources cspResources; + private final Pattern defaultCSPPattern; + + public FiltersIT(GraylogApis api) { + this.api = api; + this.cspResources = new CSPResources(); + this.defaultCSPPattern = Pattern.compile(Pattern.quote(DEFAULT_CONNECT_SRC + cspResources.cspString(CSP.DEFAULT)) + .replaceAll("\\{nonce}", "\\\\E[a-zA-Z0-9-]+\\\\Q")); + } + + @ContainerMatrixTest + void cspDocumentationBrowser() { + String expected = cspResources.cspString(CSP.SWAGGER); + given() + .spec(api.requestSpecification()) + .when() + .get("/api-browser") + .then() + .statusCode(200) + .assertThat().header(CSPResponseFilter.CSP_HEADER, + Matchers.containsString(expected)); + } + + @ContainerMatrixTest + void cspWebInterfaceAssets() { + given() + .spec(api.requestSpecification()) + .basePath("/") + .when() + .get() + .then() + .statusCode(200) + .assertThat().header(CSPResponseFilter.CSP_HEADER, + Matchers.matchesPattern(defaultCSPPattern)); + } + + @ContainerMatrixTest + void cspWebAppNotFound() { + given() + .spec(api.requestSpecification()) + .basePath("/") + .when() + .get("streams") + .then() + .assertThat().header(CSPResponseFilter.CSP_HEADER, + Matchers.matchesPattern(defaultCSPPattern)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/MongoDBFixturesWithClassLifecycleIT.java b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/MongoDBFixturesWithClassLifecycleIT.java new file mode 100644 index 000000000000..feab73da47d3 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/MongoDBFixturesWithClassLifecycleIT.java @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.testing.fullbackend; + +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.util.List; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog.testing.completebackend.Lifecycle.CLASS; + +@ContainerMatrixTestsConfiguration(serverLifecycle = CLASS) +class MongoDBFixturesWithClassLifecycleIT { + private final GraylogApis api; + + public MongoDBFixturesWithClassLifecycleIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void importMongoFixtures() { + this.api.backend().importMongoDBFixture("access-token.json", MongoDBFixturesWithClassLifecycleIT.class); + } + + @ContainerMatrixTest + void oneTokenPresentWithTestMethodA() { + assertTokenPresent(); + } + + @ContainerMatrixTest + void oneTokenPresentWithTestMethodB() { + assertTokenPresent(); + } + + private void assertTokenPresent() { + List tokens = given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .when() + .get("users/local:admin/tokens") + .then() + .statusCode(200) + .extract() + .jsonPath() + .getList("tokens"); + + assertThat(tokens).hasSize(1); + } + +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/MongoDBFixturesWithVMLifecycleIT.java b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/MongoDBFixturesWithVMLifecycleIT.java new file mode 100644 index 000000000000..1a57efa2b08e --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/fullbackend/MongoDBFixturesWithVMLifecycleIT.java @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.testing.fullbackend; + +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.junit.jupiter.api.BeforeAll; + +import java.util.List; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; +import static org.graylog.testing.completebackend.Lifecycle.VM; + +@ContainerMatrixTestsConfiguration(serverLifecycle = VM) +class MongoDBFixturesWithVMLifecycleIT { + private final GraylogApis api; + + public MongoDBFixturesWithVMLifecycleIT(GraylogApis api) { + this.api = api; + } + + @BeforeAll + public void importMongoFixtures() { + this.api.backend().importMongoDBFixture("access-token.json", MongoDBFixturesWithVMLifecycleIT.class); + } + + @ContainerMatrixTest + void oneTokenPresentWithTestMethodA() { + assertTokenPresent(); + } + + @ContainerMatrixTest + void oneTokenPresentWithTestMethodB() { + assertTokenPresent(); + } + + private void assertTokenPresent() { + List tokens = given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .when() + .get("users/local:admin/tokens") + .then() + .statusCode(200) + .extract() + .jsonPath() + .getList("tokens"); + + assertThat(tokens).hasSize(1); + } + +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/EmailUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/EmailUtils.java new file mode 100644 index 000000000000..2349710e70ed --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/EmailUtils.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import io.restassured.path.json.JsonPath; + +import java.net.URI; +import java.time.Duration; +import java.util.Optional; + +import static io.restassured.RestAssured.given; + +public class EmailUtils { + + public static JsonPath waitForMessage(URI endpointURI, final String searchQuery, Duration timeout) { + return WaitUtils.waitForObject(() -> { + final JsonPath searchResponse = given() + .get(endpointURI.toString() + "/api/v2/search?kind=containing&query=" + searchQuery) + .body() + .jsonPath(); + + if (searchResponse.getInt("count") >= 1) { + return Optional.of(searchResponse); + } else { + return Optional.empty(); + } + }, "Timed out waiting for a message", timeout); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/GelfInputUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/GelfInputUtils.java new file mode 100644 index 000000000000..5edf4e02ad7d --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/GelfInputUtils.java @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import com.google.common.collect.ImmutableMap; +import io.restassured.specification.RequestSpecification; +import org.graylog2.inputs.gelf.http.GELFHttpInput; +import org.graylog2.rest.models.system.inputs.requests.InputCreateRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.function.Supplier; + +import static io.restassured.RestAssured.given; + +public final class GelfInputUtils { + + private static final Logger LOG = LoggerFactory.getLogger(GelfInputUtils.class); + + private GelfInputUtils() { + } + + public static void createGelfHttpInput(int mappedPort, int gelfHttpPort, Supplier spec) { + + final ArrayList inputs = given() + .spec(spec.get()) + .expect() + .response() + .statusCode(200) + .when() + .get("/system/inputstates") + .body().jsonPath().get("states.message_input.attributes.port"); + + if (!inputs.contains(gelfHttpPort)) { + InputCreateRequest request = InputCreateRequest.create( + "Integration test GELF input", + GELFHttpInput.class.getName(), + true, + ImmutableMap.of("bind_address", "0.0.0.0", "port", gelfHttpPort), + null); + + given() + .spec(spec.get()) + .body(request) + .expect().response().statusCode(201) + .when() + .post("/system/inputs"); + } + + waitForGelfInputOnPort(mappedPort, spec); + } + + private static void waitForGelfInputOnPort(int mappedPort, Supplier spec) { + WaitUtils.waitFor( + () -> gelfInputIsListening(mappedPort, spec), + "Timed out waiting for GELF input listening on port " + mappedPort); + } + + private static boolean gelfInputIsListening(int mappedPort, Supplier spec) { + try { + gelfEndpoint(mappedPort, spec) + .expect().response().statusCode(200) + .when() + .options(); + LOG.info("GELF input listening on port {}", mappedPort); + return true; + } catch (Exception e) { + return false; + } + } + + private static RequestSpecification gelfEndpoint(int mappedPort, Supplier spec) { + return given() + .spec(spec.get()) + .basePath("/gelf") + .port(mappedPort); + } + + public static void postMessage(int mappedPort, + @SuppressWarnings("SameParameterValue") String messageJson, + Supplier spec) { + gelfEndpoint(mappedPort, spec) + .body(messageJson) + .expect().response().statusCode(202) + .when() + .post(); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/IndexSetUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/IndexSetUtils.java new file mode 100644 index 000000000000..9bb7607663db --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/IndexSetUtils.java @@ -0,0 +1,94 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import io.restassured.specification.RequestSpecification; +import org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig; +import org.graylog2.indexer.rotation.strategies.TimeBasedRotationStrategyConfig; +import org.graylog2.rest.resources.system.indexer.responses.IndexSetSummary; +import org.joda.time.Duration; +import org.joda.time.Period; + +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.function.Supplier; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.notNullValue; + +public class IndexSetUtils { + private IndexSetUtils() { + } + + public static String defaultIndexSetId(Supplier spec) { + return given() + .spec(spec.get()) + .when() + .get("/system/indices/index_sets") + .then() + .statusCode(200) + .assertThat() + .extract().body().jsonPath().getString("index_sets.find { it.default == true }.id"); + } + + public static String createIndexSet(Supplier spec, IndexSetSummary indexSetSummary) { + return given() + .spec(spec.get()) + .log().ifValidationFails() + .when() + .body(indexSetSummary) + .post("/system/indices/index_sets") + .then() + .log().ifError() + .log().ifValidationFails() + .statusCode(200) + .assertThat().body("id", notNullValue()) + .extract().body().jsonPath().getString("id"); + } + + public static String createIndexSet(Supplier spec, String title, String description, String prefix) { + var indexSetSummary = IndexSetSummary.create(null, + title, + description, + false, + true, + false, + prefix, + 4, + 0, + "org.graylog2.indexer.rotation.strategies.TimeBasedRotationStrategy", + TimeBasedRotationStrategyConfig.builder() + .rotationPeriod(Period.days(1)) + .rotateEmptyIndexSet(false) + .build(), + "org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy", + DeletionRetentionStrategyConfig.create(20), + ZonedDateTime.now(ZoneId.of("UTC")), + "standard", + 1, + false, + Duration.standardSeconds(5L), + null, + null, + null, + true, + null + ); + + return createIndexSet(spec, indexSetSummary); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/JsonUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/JsonUtils.java new file mode 100644 index 000000000000..4dbae416b9ee --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/JsonUtils.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.testing.utils; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.graylog2.shared.bindings.providers.ObjectMapperProvider; + +public final class JsonUtils { + + private JsonUtils(){} + + public static String toJsonString(Object s) { + try { + return new ObjectMapperProvider().get().writeValueAsString(s); + } catch (JsonProcessingException e) { + throw new RuntimeException("Failed to serialize Search", e); + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/RangeUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/RangeUtils.java new file mode 100644 index 000000000000..41f6f1a62e20 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/RangeUtils.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange; +import org.graylog2.plugin.indexer.searches.timeranges.InvalidRangeParametersException; + +public final class RangeUtils { + private RangeUtils(){} + + public static AbsoluteRange allMessagesTimeRange() { + try { + return AbsoluteRange.create("2010-01-01T00:00:00.0Z", "2050-01-01T00:00:00.0Z"); + } catch (InvalidRangeParametersException e) { + throw new RuntimeException("boo hoo", e); + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/SearchUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/SearchUtils.java new file mode 100644 index 000000000000..698ea6067ef3 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/SearchUtils.java @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */
+package org.graylog.testing.utils;
+
+import io.restassured.specification.RequestSpecification;
+import org.graylog.plugins.views.search.rest.MappedFieldTypeDTO;
+import org.graylog.testing.backenddriver.SearchDriver;
+import org.graylog.testing.completebackend.apis.GraylogApis;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+public final class SearchUtils {
+
+    private SearchUtils() {
+    }
+
+    public static List<String> searchForAllMessages(Supplier<RequestSpecification> spec) {
+        List<String> messages = new ArrayList<>();
+
+        WaitUtils.waitFor(() -> captureMessages(messages::addAll, spec), "Timed out waiting for messages to be present");
+
+        return messages;
+    }
+
+    public static boolean waitForMessage(Supplier<RequestSpecification> spec, String message) {
+        WaitUtils.waitFor(() -> captureMessage(spec, message), "Timed out waiting for message to be present");
+        return true;
+    }
+
+    private static boolean captureMessage(Supplier<RequestSpecification> spec, String message) {
+        return SearchDriver.searchAllMessages(spec).contains(message);
+    }
+
+    private static boolean captureMessages(Consumer<List<String>> messagesCaptor,
+                                           Supplier<RequestSpecification> spec) {
+        List<String> messages = SearchDriver.searchAllMessages(spec);
+        if (!messages.isEmpty()) {
+            messagesCaptor.accept(messages);
+            return true;
+        }
+        return false;
+    }
+
+    public static Set<MappedFieldTypeDTO> waitForFieldTypeDefinitions(Supplier<RequestSpecification> spec, String... fieldName) {
+        final Set<String> expectedFields = Arrays.stream(fieldName).collect(Collectors.toSet());
+        return WaitUtils.waitForObject(() -> {
+            final List<MappedFieldTypeDTO> knownTypes = SearchDriver.getFieldTypes(spec);
+            final Set<MappedFieldTypeDTO> filtered = knownTypes.stream().filter(t -> expectedFields.contains(t.name())).collect(Collectors.toSet());
+            if (filtered.size() == expectedFields.size()) {
+                return Optional.of(filtered);
+            } else {
+                return Optional.empty();
+            }
+        }, "Timed out waiting for field definition");
+    }
+}
diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/SerializationUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/SerializationUtils.java
new file mode 100644
index 000000000000..18aa39bdf5e4
--- /dev/null
+++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/SerializationUtils.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */ +package org.graylog.testing.utils; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.graylog2.shared.bindings.providers.ObjectMapperProvider; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +public class SerializationUtils { + public static InputStream serialize(Object request) { + try { + final ObjectMapper objectMapper = new ObjectMapperProvider().get(); + return new ByteArrayInputStream(objectMapper.writeValueAsBytes(request)); + } catch (JsonProcessingException e) { + throw new IllegalStateException("Error serializing test fixture: ", e); + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/StreamUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/StreamUtils.java new file mode 100644 index 000000000000..19db4aba3858 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/StreamUtils.java @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.restassured.response.ValidatableResponse; +import io.restassured.specification.RequestSpecification; +import org.graylog.testing.completebackend.apis.GraylogApis; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public final class StreamUtils { + private StreamUtils() {} + + public record StreamRule(@JsonProperty("type") int type, + @JsonProperty("value") String value, + @JsonProperty("field") String field, + @JsonProperty("inverted") boolean inverted) {} + record CreateStreamRequest(@JsonProperty("title") String title, + @JsonProperty("rules") Collection streamRules, + @JsonProperty("index_set_id") String indexSetId) {} + + public static String createStream(Supplier spec, String title, String indexSetId, StreamRule... 
streamRules) { + final CreateStreamRequest body = new CreateStreamRequest(title, List.of(streamRules), indexSetId); + final String streamId = given() + .spec(spec.get()) + .when() + .body(body) + .post("/streams") + .then() + .log().ifError() + .statusCode(201) + .assertThat().body("stream_id", notNullValue()) + .extract().body().jsonPath().getString("stream_id"); + + given() + .spec(spec.get()) + .when() + .post("/streams/" + streamId + "/resume") + .then() + .log().ifError() + .statusCode(204); + + return streamId; + } + + public static ValidatableResponse getStream(Supplier spec, String streamId) { + return given() + .spec(spec.get()) + .when() + .get("/streams/" + streamId) + .then() + .log().ifError() + .statusCode(200) + .assertThat().body("id", equalTo(streamId)); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/UserUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/UserUtils.java new file mode 100644 index 000000000000..57fd6bfda5ac --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/UserUtils.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.restassured.path.json.JsonPath; +import io.restassured.specification.RequestSpecification; +import org.graylog.testing.completebackend.apis.GraylogApis; + +import java.util.List; +import java.util.function.Supplier; + +import static io.restassured.RestAssured.given; + +public class UserUtils { + + public static record User(@JsonProperty("username") String username, + @JsonProperty("password") String password, + @JsonProperty("first_name") String firstName, + @JsonProperty("last_name") String lastName, + @JsonProperty("email") String email, + @JsonProperty("service_account") boolean serviceAccount, + @JsonProperty("session_timeout_ms") long sessionTimeoutMs, + @JsonProperty("timezone") String timezone, + @JsonProperty("roles") List roles, + @JsonProperty("permissions") List permissions + ) { + } + + public static JsonPath createUser(Supplier spec, User user) { + given() + .spec(spec.get()) + .when() + .body(user) + .post("/users") + .then() + .log().ifError() + .statusCode(201); + + return getUserInfo(spec, user.username); + } + + public static JsonPath getUserInfo(Supplier spec, String username) { + return given() + .spec(spec.get()) + .when() + .get("/users/" + username) + .then() + .log().ifError() + .statusCode(200) + .extract().jsonPath(); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog/testing/utils/WaitUtils.java b/full-backend-tests/src/test/java/org/graylog/testing/utils/WaitUtils.java new file mode 100644 index 000000000000..0fad4fbb1e60 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog/testing/utils/WaitUtils.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.testing.utils; + +import org.glassfish.jersey.internal.util.Producer; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.Optional; + +import static org.junit.Assert.fail; + +public final class WaitUtils { + + private static final int TIMEOUT_MS = 10000; + private static final int SLEEP_MS = 500; + + private WaitUtils() { + } + + public static void waitFor(Producer predicate, String timeoutErrorMessage) { + waitForObject(() -> predicate.call() ? Optional.of(true) : Optional.empty(), timeoutErrorMessage); + } + + public static T waitForObject(Producer> predicate, String timeoutErrorMessage) { + return waitForObject(predicate, timeoutErrorMessage, Duration.of(TIMEOUT_MS, ChronoUnit.MILLIS)); + } + + public static T waitForObject(Producer> predicate, String timeoutErrorMessage, Duration timeout) { + int msPassed = 0; + while (msPassed <= timeout.toMillis()) { + final Optional result = predicate.call(); + if (result != null && result.isPresent()) { + return result.get(); + } + msPassed += SLEEP_MS; + sleep(); + } + throw new AssertionError(timeoutErrorMessage); + } + + private static void sleep() { + try { + Thread.sleep(WaitUtils.SLEEP_MS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/indexer/counts/CountsIT.java b/full-backend-tests/src/test/java/org/graylog2/indexer/counts/CountsIT.java new file mode 100644 index 000000000000..983394ef6876 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/indexer/counts/CountsIT.java @@ -0,0 +1,248 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
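The WaitUtils helper above polls a Producer until it succeeds or the default 10-second timeout (checked in 500 ms steps) elapses. A usage sketch follows; the ExampleApi client is a hypothetical stand-in for whatever REST client the test uses, not part of this change.

// Illustrative sketch only (not part of this change): poll with WaitUtils until a
// condition holds, then fetch a value once it becomes available.
import org.graylog.testing.utils.WaitUtils;

import java.time.Duration;
import java.util.Optional;

class WaitUtilsUsageSketch {
    // Hypothetical client interface, only here to make the sketch self-contained.
    interface ExampleApi {
        boolean streamIsActive(String streamId);
        String streamTitleOrNull(String streamId);
    }

    String waitForStream(ExampleApi api, String streamId) {
        // Throws an AssertionError with the given message if the predicate never returns true.
        WaitUtils.waitFor(() -> api.streamIsActive(streamId),
                "Stream " + streamId + " did not become active in time");

        // waitForObject() keeps polling until the producer yields a non-empty Optional and
        // returns the unwrapped value; the overload below takes a custom timeout.
        return WaitUtils.waitForObject(
                () -> Optional.ofNullable(api.streamTitleOrNull(streamId)),
                "Stream title never became available",
                Duration.ofSeconds(30));
    }
}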
+ */ +package org.graylog2.indexer.counts; + +import com.google.common.collect.ImmutableMap; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.elasticsearch.BulkIndexRequest; +import org.graylog.testing.elasticsearch.ContainerMatrixElasticsearchBaseTest; +import org.graylog.testing.elasticsearch.SearchServerInstance; +import org.graylog2.indexer.IndexNotFoundException; +import org.graylog2.indexer.IndexSet; +import org.graylog2.indexer.IndexSetRegistry; +import org.graylog2.indexer.indexset.IndexSetConfig; +import org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy; +import org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig; +import org.graylog2.indexer.rotation.strategies.MessageCountRotationStrategy; +import org.graylog2.indexer.rotation.strategies.MessageCountRotationStrategyConfig; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.TestMethodOrder; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Map; +import java.util.Optional; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +// these tests only test the SearchServer, so there is only one MongoDB-version necessary (needed, to launch the tests) +@ContainerMatrixTestsConfiguration +@TestMethodOrder(MethodOrderer.OrderAnnotation.class) +public class CountsIT extends ContainerMatrixElasticsearchBaseTest { + private static final String INDEX_NAME_1 = "index_set_1_counts_test_0"; + private static final String INDEX_NAME_2 = "index_set_2_counts_test_0"; + private static final String INDEX_NAME_3 = "index_set_3_counts_test_0"; + + private IndexSetRegistry indexSetRegistry; + private IndexSet indexSet1; + private IndexSet indexSet2; + private IndexSet indexSet3; + private Counts counts; + private IndexSetConfig indexSetConfig1; + private IndexSetConfig indexSetConfig2; + private IndexSetConfig indexSetConfig3; + + public CountsIT(SearchServerInstance elasticsearch) { + super(elasticsearch); + } + + @BeforeAll + public void setUp() { + // @TestInstance(PER_CLASS) and the MockitoExtension don't work together, initialization has to be done manually + indexSetRegistry = mock(IndexSetRegistry.class); + indexSet1 = mock(IndexSet.class); + indexSet2 = mock(IndexSet.class); + indexSet3 = mock(IndexSet.class); + + client().createIndex(INDEX_NAME_1, 1, 0); + client().createIndex(INDEX_NAME_2, 1, 0); + client().createIndex(INDEX_NAME_3, 1, 0); + client().waitForGreenStatus(INDEX_NAME_1, INDEX_NAME_2, INDEX_NAME_3); + + counts = new Counts(indexSetRegistry, searchServer().adapters().countsAdapter()); + + indexSetConfig1 = IndexSetConfig.builder() + .id("id-1") + .title("title-1") + .indexPrefix("index_set_1_counts_test") + .shards(1) + .replicas(0) + .rotationStrategyClass(MessageCountRotationStrategy.class.getCanonicalName()) + .rotationStrategyConfig(MessageCountRotationStrategyConfig.createDefault()) + .retentionStrategyClass(DeletionRetentionStrategy.class.getCanonicalName()) + .retentionStrategyConfig(DeletionRetentionStrategyConfig.createDefault()) + .creationDate(ZonedDateTime.of(2016, 10, 12, 0, 0, 0, 0, ZoneOffset.UTC)) + 
.indexAnalyzer("standard") + .indexTemplateName("template-1") + .indexOptimizationMaxNumSegments(1) + .indexOptimizationDisabled(false) + .build(); + + indexSetConfig2 = IndexSetConfig.builder() + .id("id-2") + .title("title-2") + .indexPrefix("index_set_2_counts_test") + .shards(1) + .replicas(0) + .rotationStrategyClass(MessageCountRotationStrategy.class.getCanonicalName()) + .rotationStrategyConfig(MessageCountRotationStrategyConfig.createDefault()) + .retentionStrategyClass(DeletionRetentionStrategy.class.getCanonicalName()) + .retentionStrategyConfig(DeletionRetentionStrategyConfig.createDefault()) + .creationDate(ZonedDateTime.of(2016, 10, 13, 0, 0, 0, 0, ZoneOffset.UTC)) + .indexAnalyzer("standard") + .indexTemplateName("template-2") + .indexOptimizationMaxNumSegments(1) + .indexOptimizationDisabled(false) + .build(); + + indexSetConfig3 = IndexSetConfig.builder() + .id("id-3") + .title("title-3") + .indexPrefix("index_set_3_counts_test") + .shards(1) + .replicas(0) + .rotationStrategyClass(MessageCountRotationStrategy.class.getCanonicalName()) + .rotationStrategyConfig(MessageCountRotationStrategyConfig.createDefault()) + .retentionStrategyClass(DeletionRetentionStrategy.class.getCanonicalName()) + .retentionStrategyConfig(DeletionRetentionStrategyConfig.createDefault()) + .creationDate(ZonedDateTime.of(2016, 10, 13, 0, 0, 0, 0, ZoneOffset.UTC)) + .indexAnalyzer("standard") + .indexTemplateName("template-3") + .indexOptimizationMaxNumSegments(1) + .indexOptimizationDisabled(false) + .build(); + } + + @BeforeEach + public void initMocks() { + when(indexSetRegistry.getManagedIndices()).thenReturn(new String[]{INDEX_NAME_1, INDEX_NAME_2}); + when(indexSetRegistry.get(indexSetConfig1.id())).thenReturn(Optional.of(indexSet1)); + when(indexSetRegistry.get(indexSetConfig2.id())).thenReturn(Optional.of(indexSet2)); + when(indexSetRegistry.get(indexSetConfig3.id())).thenReturn(Optional.of(indexSet3)); + when(indexSet1.getManagedIndices()).thenReturn(new String[]{INDEX_NAME_1}); + when(indexSet2.getManagedIndices()).thenReturn(new String[]{INDEX_NAME_2}); + when(indexSet3.getManagedIndices()).thenReturn(new String[]{INDEX_NAME_3}); + } + + @ContainerMatrixTest + @Order(1) + public void totalReturnsZeroWithEmptyIndex() { + assertThat(counts.total()).isEqualTo(0L); + assertThat(counts.total(indexSet1)).isEqualTo(0L); + assertThat(counts.total(indexSet2)).isEqualTo(0L); + assertThat(counts.total(indexSet3)).isEqualTo(0L); + } + + @ContainerMatrixTest + @Order(2) + public void totalReturnsZeroWithNoIndices() { + final BulkIndexRequest bulkIndexRequest = new BulkIndexRequest(); + for (int i = 0; i < 10; i++) { + final Map source = ImmutableMap.of( + "foo", "bar", + "counter", i); + bulkIndexRequest.addRequest(INDEX_NAME_3, source); + } + + client().bulkIndex(bulkIndexRequest); + + // Simulate no indices for the second index set. + when(indexSet2.getManagedIndices()).thenReturn(new String[0]); + + assertThat(counts.total(indexSet1)).isEqualTo(0L); + assertThat(counts.total(indexSet2)).isEqualTo(0L); + assertThat(counts.total(indexSet3)).isEqualTo(10L); + + // Simulate no indices for all index sets. 
+ when(indexSetRegistry.getManagedIndices()).thenReturn(new String[0]); + + assertThat(counts.total()).isEqualTo(0L); + } + + @ContainerMatrixTest + public void totalReturnsNumberOfMessages() { + final BulkIndexRequest bulkIndexRequest = new BulkIndexRequest(); + + final int count1 = 10; + for (int i = 0; i < count1; i++) { + final Map source = ImmutableMap.of( + "foo", "bar", + "counter", i); + bulkIndexRequest.addRequest(INDEX_NAME_1, source); + } + + final int count2 = 5; + for (int i = 0; i < count2; i++) { + final Map source = ImmutableMap.of( + "foo", "bar", + "counter", i); + bulkIndexRequest.addRequest(INDEX_NAME_2, source); + } + + client().bulkIndex(bulkIndexRequest); + + assertThat(counts.total()).isEqualTo(count1 + count2); + assertThat(counts.total(indexSet1)).isEqualTo(count1); + assertThat(counts.total(indexSet2)).isEqualTo(count2); + } + + @ContainerMatrixTest + public void totalThrowsElasticsearchExceptionIfIndexDoesNotExist() { + final IndexSet indexSet = mock(IndexSet.class); + when(indexSet.getManagedIndices()).thenReturn(new String[]{"does_not_exist"}); + + try { + counts.total(indexSet); + fail("Expected IndexNotFoundException"); + } catch (IndexNotFoundException e) { + final String expectedErrorDetail = "Index not found for query: does_not_exist. Try recalculating your index ranges."; + assertThat(e) + .hasMessageStartingWith("Fetching message count failed for indices [does_not_exist]") + .hasMessageEndingWith(expectedErrorDetail) + .hasNoSuppressedExceptions(); + assertThat(e.getErrorDetails()).containsExactly(expectedErrorDetail); + } + } + + @ContainerMatrixTest + public void totalSucceedsWithListOfIndicesLargerThan4Kilobytes() { + final int numberOfIndices = 100; + final String[] indexNames = new String[numberOfIndices]; + final String indexPrefix = "very_long_list_of_indices_0123456789_counts_it_"; + final IndexSet indexSet = mock(IndexSet.class); + + for (int i = 0; i < numberOfIndices; i++) { + final String indexName = indexPrefix + i; + client().createIndex(indexName); + indexNames[i] = indexName; + } + + when(indexSet.getManagedIndices()).thenReturn(indexNames); + + final String indicesString = String.join(",", indexNames); + assertThat(indicesString.length()).isGreaterThanOrEqualTo(4096); + + assertThat(counts.total(indexSet)).isEqualTo(0L); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/indexer/fieldtypes/FieldTypeMappingsIT.java b/full-backend-tests/src/test/java/org/graylog2/indexer/fieldtypes/FieldTypeMappingsIT.java new file mode 100644 index 000000000000..4d64bfaa0353 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/indexer/fieldtypes/FieldTypeMappingsIT.java @@ -0,0 +1,108 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
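CountsIT builds the same bulk-index fixture in several tests; a small helper along these lines (a sketch, not part of this change) would keep that loop in one place.

// Illustrative helper, not part of this change: builds a BulkIndexRequest with
// `count` simple documents for one index, mirroring the loops used in CountsIT.
import com.google.common.collect.ImmutableMap;
import org.graylog.testing.elasticsearch.BulkIndexRequest;

import java.util.Map;

final class BulkFixtures {
    private BulkFixtures() {}

    static BulkIndexRequest simpleDocuments(String indexName, int count) {
        final BulkIndexRequest request = new BulkIndexRequest();
        for (int i = 0; i < count; i++) {
            // Same document shape as the tests above: a constant field plus a counter.
            final Map<String, Object> source = ImmutableMap.of("foo", "bar", "counter", i);
            request.addRequest(indexName, source);
        }
        return request;
    }
}

A test could then call client().bulkIndex(BulkFixtures.simpleDocuments(INDEX_NAME_1, 10)) instead of repeating the loop inline.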
+ */ +package org.graylog2.indexer.fieldtypes; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.completebackend.apis.Streams; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Locale; +import java.util.Set; + +import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; + +@ContainerMatrixTestsConfiguration() +public class FieldTypeMappingsIT { + private static final String INDEX_PREFIX = "custom-mappings"; + private final GraylogApis api; + + public FieldTypeMappingsIT(GraylogApis api) { + this.api = api; + } + + record FieldTypeChangeRequest(@JsonProperty("index_sets") + Set indexSetsIds, + @JsonProperty("field") + String fieldName, + @JsonProperty("type") + String type, + @JsonProperty("rotate") + boolean rotateImmediately) { + + } + + @ContainerMatrixTest + void changeFieldTypeFromStringToIp() { + var indexSet = api.indices().createIndexSet("Field Type Mappings Test", "Testing custom field type mapping", INDEX_PREFIX); + var stream = api.streams().createStream("Field Type Mappings Stream", indexSet, Streams.StreamRule.exact("field-type-mappings-test", "test-id", false)); + var gelfInput = api.gelf().createGelfHttpInput(); + waitForNewIndex(indexSet, INDEX_PREFIX + "_0"); + + gelfInput.postMessage(messageWithTimestamp("2019-07-23 09:53:08.175")); + + api.search().waitForMessage("field-type-mappings-test"); + var previousType = new ArrayList<>(api.fieldTypes().waitForFieldTypeDefinitions("source_ip")); + assertThat(previousType.get(0)).isNotNull() + .satisfies(fieldType -> assertThat(fieldType.type()).isEqualTo(FieldTypeMapper.STRING_TYPE)); + + given() + .config(api.withGraylogBackendFailureConfig()) + .spec(api.requestSpecification()) + .when() + .body(new FieldTypeChangeRequest(Set.of(indexSet), "source_ip", "ip", true)) + .put("/system/indices/mappings") + .then() + .statusCode(200); + + waitForNewIndex(indexSet, INDEX_PREFIX + "_1"); + + gelfInput.postMessage(messageWithTimestamp("2023-07-23 09:53:08.175")); + + api.waitFor(() -> { + var fieldTypes = api.fieldTypes().getFieldTypes(AbsoluteRange.create("2023-01-01T00:00:00.000Z", "2024-01-01T00:00:00.000Z"), Set.of(stream)); + var sourceIpField = fieldTypes.stream().filter(type -> type.name().equals("source_ip")).findFirst(); + + return sourceIpField.map(type -> type.type().type().equals("ip")).orElse(false); + }, "Waiting for field type to change to `ip` timed out.", Duration.ofSeconds(60)); + } + + private void waitForNewIndex(String indexSetId, String indexName) { + api.waitFor(() -> api.get("/system/indexer/indices/" + indexSetId + "/list ", 200) + .extract() + .body() + .asString() + .contains(indexName), "Waiting for new index after rotation timed out.", Duration.ofMinutes(3)); + } + + public String messageWithTimestamp(String timestamp) { + return String.format(Locale.ROOT, """ + { + "short_message":"field-type-mappings-test", + "test_id": "field-type-mappings-test", + "source":"example.org", + "source_ip": "192.168.1.1", + "timestamp": "%s", + "level":3 + }""", timestamp); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/indexer/indices/IndicesIT.java 
b/full-backend-tests/src/test/java/org/graylog2/indexer/indices/IndicesIT.java new file mode 100644 index 000000000000..997cc1666dca --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/indexer/indices/IndicesIT.java @@ -0,0 +1,683 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog2.indexer.indices; + +import com.github.joschi.jadconfig.util.Duration; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.eventbus.EventBus; +import com.google.common.eventbus.Subscribe; +import org.apache.commons.codec.binary.Base64; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.elasticsearch.ContainerMatrixElasticsearchBaseTest; +import org.graylog.testing.elasticsearch.SearchServerInstance; +import org.graylog2.audit.NullAuditEventSender; +import org.graylog2.indexer.IgnoreIndexTemplate; +import org.graylog2.indexer.IndexMappingFactory; +import org.graylog2.indexer.IndexNotFoundException; +import org.graylog2.indexer.IndexSet; +import org.graylog2.indexer.IndexSetStatsCreator; +import org.graylog2.indexer.IndexTemplateNotFoundException; +import org.graylog2.indexer.MessageIndexTemplateProvider; +import org.graylog2.indexer.TestIndexSet; +import org.graylog2.indexer.cluster.Node; +import org.graylog2.indexer.indexset.IndexSetConfig; +import org.graylog2.indexer.indexset.profile.IndexFieldTypeProfileService; +import org.graylog2.indexer.indices.blocks.IndicesBlockStatus; +import org.graylog2.indexer.indices.events.IndicesClosedEvent; +import org.graylog2.indexer.indices.events.IndicesDeletedEvent; +import org.graylog2.indexer.indices.events.IndicesReopenedEvent; +import org.graylog2.indexer.indices.stats.IndexStatistics; +import org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy; +import org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig; +import org.graylog2.indexer.rotation.strategies.MessageCountRotationStrategy; +import org.graylog2.indexer.rotation.strategies.MessageCountRotationStrategyConfig; +import org.graylog2.indexer.searches.IndexRangeStats; +import org.graylog2.plugin.Tools; +import org.graylog2.plugin.system.NodeId; +import org.graylog2.plugin.system.SimpleNodeId; +import org.graylog2.rest.resources.system.indexer.responses.IndexSetStats; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; + +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static 
org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatCode; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +// these tests only test the SearchServer, so there is only one MongoDB-version necessary (needed, to launch the tests) +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS) +public class IndicesIT extends ContainerMatrixElasticsearchBaseTest { + private static final String INDEX_NAME = "graylog_0"; + private static final IndexSetConfig indexSetConfig = IndexSetConfig.builder() + .id("index-set-1") + .title("Index set 1") + .description("For testing") + .indexPrefix("test_index_set") + .creationDate(ZonedDateTime.now()) + .shards(1) + .replicas(0) + .rotationStrategyClass(MessageCountRotationStrategy.class.getCanonicalName()) + .rotationStrategyConfig(MessageCountRotationStrategyConfig.createDefault()) + .retentionStrategyClass(DeletionRetentionStrategy.class.getCanonicalName()) + .retentionStrategyConfig(DeletionRetentionStrategyConfig.createDefault()) + .indexAnalyzer("standard") + .indexTemplateName("template-1") + .indexOptimizationMaxNumSegments(1) + .indexOptimizationDisabled(false) + .build(); + protected static final IndexSet indexSet = new TestIndexSet(indexSetConfig); + private final Set indicesToCleanUp = new HashSet<>(); + protected Indices indices; + private EventBus eventBus; + private final NodeId nodeId = new SimpleNodeId("5ca1ab1e-0000-4000-a000-000000000000"); + + public IndicesIT(SearchServerInstance elasticsearch) { + super(elasticsearch); + } + + @BeforeEach + public void setUp() { + eventBus = new EventBus("indices-test"); + final Node node = new Node(searchServer().adapters().nodeAdapter()); + final IndexMappingFactory indexMappingFactory = new IndexMappingFactory(node, + ImmutableMap.of(MessageIndexTemplateProvider.MESSAGE_TEMPLATE_TYPE, new MessageIndexTemplateProvider())); + indices = new Indices( + indexMappingFactory, + nodeId, + new NullAuditEventSender(), + eventBus, + searchServer().adapters().indicesAdapter(), + mock(IndexFieldTypeProfileService.class) + ); + } + + @AfterEach + public void cleanUp() { + indicesToCleanUp.forEach(client()::deleteIndices); + indicesToCleanUp.clear(); + } + + protected String createRandomIndex(final String prefix) { + final String index = client().createRandomIndex(prefix); + indicesToCleanUp.add(index); + return index; + } + + @ContainerMatrixTest + public void testGetIndicesBlocksStatus() { + final String index = createRandomIndex("indices_it_"); + + IndicesBlockStatus indicesBlocksStatus = indices.getIndicesBlocksStatus(Collections.singletonList(index)); + assertEquals(0, indicesBlocksStatus.countBlockedIndices()); + + client().setIndexBlock(index); + indicesBlocksStatus = indices.getIndicesBlocksStatus(Collections.singletonList(index)); + assertEquals(1, indicesBlocksStatus.countBlockedIndices()); + final Collection indexBlocks = indicesBlocksStatus.getIndexBlocks(index); + assertEquals(1, indexBlocks.size()); + assertTrue(indexBlocks.contains("index.blocks.read_only_allow_delete")); + + client().resetIndexBlock(index); + indicesBlocksStatus = indices.getIndicesBlocksStatus(Collections.singletonList(index)); + assertEquals(0, 
indicesBlocksStatus.countBlockedIndices()); + } + + @ContainerMatrixTest + public void testDelete() { + final String index = createRandomIndex("indices_it_"); + indices.delete(index); + + assertThat(client().indicesExists(index)).isFalse(); + } + + @ContainerMatrixTest + public void testClose() { + final String index = createRandomIndex("indices_it_"); + + assertThat(indices.isOpen(index)).isTrue(); + + indices.close(index); + + assertThat(indices.isClosed(index)).isTrue(); + } + + @ContainerMatrixTest + public void findClosedIndices() { + final String index1 = createRandomIndex("indices_it_"); + client().closeIndex(index1); + final String index2 = createRandomIndex("otherindices_it_"); + client().closeIndex(index2); + client().createRandomIndex("evenmoreindices_it_"); + + final Set closedIndices = indices.getClosedIndices(Collections.singleton("*")); + + assertThat(closedIndices).containsExactlyInAnyOrder(index1, index2); + } + + @ContainerMatrixTest + public void aliasExistsReturnsIfGivenIndexNameIsIndexOrAlias() { + final String index = createRandomIndex("indices_it_"); + final String alias = "graylog_alias_exists"; + assertThat(indices.aliasExists(alias)).isFalse(); + + client().addAliasMapping(index, alias); + + assertThat(indices.aliasExists(alias)).isTrue(); + assertThat(indices.exists(alias)).isFalse(); + } + + @ContainerMatrixTest + public void aliasExistsReturnsIfGivenIndexHasAlias() { + final String indexName = createRandomIndex("indices_it_"); + + assertThat(indices.aliasExists(indexName)).isFalse(); + } + + @ContainerMatrixTest + public void existsIndicatesPresenceOfGivenIndex() { + final String indexName = createRandomIndex("indices_it_"); + + assertThat(indices.exists(indexName)).isTrue(); + } + + @ContainerMatrixTest + public void existsReturnsFalseIfGivenIndexDoesNotExists() { + final String indexNotAlias = "graylog_index_does_not_exist"; + assertThat(indices.exists(indexNotAlias)).isFalse(); + } + + @ContainerMatrixTest + public void aliasTargetReturnsListOfTargetsGivenAliasIsPointingTo() { + final String index = createRandomIndex("indices_it_"); + final String alias = "graylog_alias_target"; + assertThat(indices.aliasTarget(alias)).isEmpty(); + + client().addAliasMapping(index, alias); + + assertThat(indices.aliasTarget(alias)).contains(index); + } + + @ContainerMatrixTest + public void indexRangeStatsOfIndexReturnsMinMaxTimestampsForGivenIndex() { + importFixture("org/graylog2/indexer/indices/IndicesIT.json"); + + IndexRangeStats stats = indices.indexRangeStatsOfIndex(INDEX_NAME); + + assertThat(stats.min()).isEqualTo(new DateTime(2015, 1, 1, 1, 0, DateTimeZone.UTC)); + assertThat(stats.max()).isEqualTo(new DateTime(2015, 1, 1, 5, 0, DateTimeZone.UTC)); + } + + @ContainerMatrixTest + public void indexRangeStatsWorksForEmptyIndex() { + final String indexName = createRandomIndex("indices_it_"); + + IndexRangeStats stats = indices.indexRangeStatsOfIndex(indexName); + + assertThat(stats.min()).isEqualTo(new DateTime(0L, DateTimeZone.UTC)); + assertThat(stats.max()).isEqualTo(new DateTime(0L, DateTimeZone.UTC)); + } + + @ContainerMatrixTest + public void indexRangeStatsThrowsExceptionIfIndexIsClosed() { + assertThrows(IndexNotFoundException.class, () -> { + final String index = createRandomIndex("indices_it_"); + + client().closeIndex(index); + + indices.indexRangeStatsOfIndex(index); + }); + } + + @ContainerMatrixTest + public void indexRangeStatsThrowsExceptionIfIndexDoesNotExists() { + assertThrows(IndexNotFoundException.class, () -> 
indices.indexRangeStatsOfIndex("does-not-exist")); + } + + @ContainerMatrixTest + public void createEnsuresIndexTemplateExists() { + final String indexName = "index_template_test"; + indicesToCleanUp.add(indexName); + + final String templateName = indexSetConfig.indexTemplateName(); + + assertThat(client().templateExists(templateName)).isFalse(); + + indices.create(indexName, indexSet); + + assertThat(client().templateExists(templateName)).isTrue(); + assertThat(client().fieldType(indexName, "message")).isEqualTo("text"); + } + + @ContainerMatrixTest + public void createOverwritesIndexTemplate() { + final String indexName = "index_template_test"; + indicesToCleanUp.add(indexName); + + final String templateName = indexSetConfig.indexTemplateName(); + + final Map beforeMapping = ImmutableMap.of( + "_source", ImmutableMap.of("enabled", false), + "properties", ImmutableMap.of("message", + ImmutableMap.of("type", "text"))); + + var templateSource = Template.create(indexSet.getIndexWildcard(), new Template.Mappings(beforeMapping), 1L, new Template.Settings(Map.of())); + + client().putTemplate(templateName, templateSource); + + indices.create(indexName, indexSet); + + assertThat(client().fieldType(indexName, "message")).isEqualTo("text"); + } + + @ContainerMatrixTest + public void indexCreationDateReturnsIndexCreationDateOfExistingIndexAsDateTime() { + final DateTime now = DateTime.now(DateTimeZone.UTC); + final String indexName = createRandomIndex("indices_it_"); + + final Optional indexCreationDate = indices.indexCreationDate(indexName); + assertThat(indexCreationDate).isNotEmpty() + .hasValueSatisfying(date -> assertThat(date.toDate()).isCloseTo(now.toDate(), TimeUnit.SECONDS.toMillis(1))); + } + + @ContainerMatrixTest + public void indexCreationDateReturnsEmptyOptionalForNonExistingIndex() { + assertThat(indices.indexCreationDate("index_missing")).isEmpty(); + } + + @ContainerMatrixTest + public void closePostsIndicesClosedEvent() { + final org.graylog2.indexer.indices.IndicesIT.IndicesEventListener listener = new org.graylog2.indexer.indices.IndicesIT.IndicesEventListener(); + eventBus.register(listener); + + final String index = createRandomIndex("indices_it_"); + + indices.close(index); + + assertThat(listener.indicesClosedEvents).containsOnly(IndicesClosedEvent.create(index)); + assertThat(listener.indicesDeletedEvents).isEmpty(); + assertThat(listener.indicesReopenedEvents).isEmpty(); + } + + @ContainerMatrixTest + public void deletePostsIndicesDeletedEvent() { + final org.graylog2.indexer.indices.IndicesIT.IndicesEventListener listener = new org.graylog2.indexer.indices.IndicesIT.IndicesEventListener(); + eventBus.register(listener); + + final String index = createRandomIndex("indices_it_"); + + indices.delete(index); + + assertThat(listener.indicesDeletedEvents).containsOnly(IndicesDeletedEvent.create(index)); + assertThat(listener.indicesClosedEvents).isEmpty(); + assertThat(listener.indicesReopenedEvents).isEmpty(); + } + + @ContainerMatrixTest + public void reopenIndexPostsIndicesReopenedEvent() { + final org.graylog2.indexer.indices.IndicesIT.IndicesEventListener listener = new org.graylog2.indexer.indices.IndicesIT.IndicesEventListener(); + eventBus.register(listener); + + final String index = createRandomIndex("indices_it_"); + + client().closeIndex(index); + + indices.reopenIndex(index); + + assertThat(listener.indicesReopenedEvents).containsOnly(IndicesReopenedEvent.create(index)); + assertThat(listener.indicesClosedEvents).isEmpty(); + 
assertThat(listener.indicesDeletedEvents).isEmpty(); + } + + @ContainerMatrixTest + public void ensureIndexTemplateDoesntThrowOnIgnoreIndexTemplateAndExistingTemplate() { + final String templateName = indexSetConfig.indexTemplateName(); + + indices.ensureIndexTemplate(indexSet); + + assertThat(client().templateExists(templateName)).isTrue(); + + indices = new Indices( + createThrowingIndexMappingFactory(indexSetConfig), + nodeId, + new NullAuditEventSender(), + eventBus, + searchServer().adapters().indicesAdapter(), + mock(IndexFieldTypeProfileService.class)); + + assertThatCode(() -> indices.ensureIndexTemplate(indexSet)).doesNotThrowAnyException(); + + assertThat(client().templateExists(templateName)).isTrue(); + } + + private IndexMappingFactory createThrowingIndexMappingFactory(IndexSetConfig indexSetConfig) { + final IndexMappingFactory indexMappingFactory = mock(IndexMappingFactory.class); + when(indexMappingFactory.createIndexMapping(any())) + .thenThrow(new IgnoreIndexTemplate(true, "Reason", + indexSetConfig.indexPrefix(), indexSetConfig.indexTemplateName(), + indexSetConfig.indexTemplateType().orElse(null))); + return indexMappingFactory; + } + + @ContainerMatrixTest + public void ensureIndexTemplateThrowsOnIgnoreIndexTemplateAndNonExistingTemplate() { + final String templateName = indexSetConfig.indexTemplateName(); + + try { + indices.deleteIndexTemplate(indexSet); + } catch (Exception ignored) { + } + + assertThat(client().templateExists(templateName)).isFalse(); + + indices = new Indices( + createThrowingIndexMappingFactory(indexSetConfig), + nodeId, + new NullAuditEventSender(), + eventBus, + searchServer().adapters().indicesAdapter(), + mock(IndexFieldTypeProfileService.class)); + + assertThatCode(() -> indices.ensureIndexTemplate(indexSet)) + .isExactlyInstanceOf(IndexTemplateNotFoundException.class) + .hasMessage("No index template with name 'template-1' (type - 'null') found in Elasticsearch"); + } + + @ContainerMatrixTest + public void getIndices() { + final IndexSet indexSet = new TestIndexSet(indexSetConfig.toBuilder().indexPrefix("indices_it").build()); + final String index1 = createRandomIndex("indices_it_"); + final String index2 = createRandomIndex("indices_it_"); + + client().closeIndex(index2); + + assertThat(indices.getIndices(indexSet)) + .containsOnly(index1, index2); + assertThat(indices.getIndices(indexSet, "open", "close")) + .containsOnly(index1, index2); + assertThat(indices.getIndices(indexSet, "open")) + .containsOnly(index1); + assertThat(indices.getIndices(indexSet, "close")) + .containsOnly(index2); + } + + @ContainerMatrixTest + public void testIndexId() { + final String index = createRandomIndex("indices_it_"); + String uuid = indices.getIndexId(index); + assertThat(uuid).isNotEmpty(); + assert (Base64.isBase64(uuid)); + } + + @ContainerMatrixTest + public void storeSizeInBytesReturnsValue() { + final String index = createRandomIndex("foo"); + + final Optional storeSizeInBytes = indices.getStoreSizeInBytes(index); + + assertThat(storeSizeInBytes).isNotEmpty(); + } + + @ContainerMatrixTest + public void retrievesCreationTimeOfIndexInUTC() { + final String index = createRandomIndex("foo"); + + final Optional creationDate = indices.indexCreationDate(index); + + assertThat(creationDate).hasValueSatisfying(dt -> + assertThat(dt.getZone()).isEqualTo(DateTimeZone.UTC)); + } + + @ContainerMatrixTest + public void canStoreAndRetrieveIndexClosingDate() { + final String index = createRandomIndex("foo"); + final DateTime someDate = Tools.nowUTC(); + + 
indices.setClosingDate(index, someDate); + + final Optional closingDate = indices.indexClosingDate(index); + + assertThat(closingDate).hasValueSatisfying(dt -> { + assertThat(dt).isEqualTo(someDate); + assertThat(dt.getZone()).isEqualTo(DateTimeZone.UTC); + }); + } + + @ContainerMatrixTest + public void setClosingDateMergesExistingMetaDataEntries() { + final String index = createRandomIndex("foo"); + client().updateMapping(index, Map.of("_meta", Map.of("existing", "should be kept"))); + + indices.setClosingDate(index, Tools.nowUTC()); + + final Map mapping = client().getMapping(index); + assertThat(mapping.get("_meta")).satisfies(v -> { + assertThat(v).isNotNull(); + //noinspection unchecked + assertThat(((Map) v).get("existing")).isEqualTo("should be kept"); + }); + final Optional closingDate = indices.indexClosingDate(index); + assertThat(closingDate).isNotEmpty(); + } + + @ContainerMatrixTest + public void canHandleMissingIndexClosingDate() { + final String index = createRandomIndex("foo"); + final Optional closingDate = indices.indexClosingDate(index); + assertThat(closingDate).isEmpty(); + } + + @ContainerMatrixTest + public void retrievesAllAliasesForIndex() { + final String index1 = createRandomIndex("foo-"); + final String index2 = createRandomIndex("foo-"); + + client().addAliasMapping(index1, "alias1"); + client().addAliasMapping(index2, "alias2"); + client().addAliasMapping(index2, "alias3"); + + final Map> indexNamesAndAliases = indices.getIndexNamesAndAliases("foo-*"); + + assertThat(indexNamesAndAliases) + .containsAllEntriesOf( + ImmutableMap.of( + index1, Collections.singleton("alias1"), + index2, ImmutableSet.of("alias2", "alias3") + ) + ); + } + + @ContainerMatrixTest + public void retrieveIndexStatisticsForIndices() { + final String index = createRandomIndex("indices_it_"); + + final Set indicesStats = indices.getIndicesStats(Collections.singleton(index)); + + assertThat(indicesStats).isNotEmpty(); + } + + @ContainerMatrixTest + public void cyclingDeflectorMovesAliasFromOldToNewTarget() { + final String deflector = "indices_it_deflector"; + + final String index1 = createRandomIndex("indices_it_"); + final String index2 = createRandomIndex("indices_it_"); + + client().addAliasMapping(index1, deflector); + + assertThat(indices.aliasTarget(deflector)).hasValue(index1); + + indices.cycleAlias(deflector, index2, index1); + + assertThat(indices.aliasTarget(deflector)).hasValue(index2); + } + + @ContainerMatrixTest + public void retrievingIndexStatsForWildcard() { + final IndexSetStatsCreator indexSetStatsCreator = new IndexSetStatsCreator(indices); + final String indexPrefix = "indices_wildcard_"; + final String wildcard = indexPrefix + "*"; + final IndexSet indexSet = mock(IndexSet.class); + when(indexSet.getIndexWildcard()).thenReturn(wildcard); + + createRandomIndex(indexPrefix); + createRandomIndex(indexPrefix); + + final IndexSetStats indexSetStats = indexSetStatsCreator.getForIndexSet(indexSet); + + assertThat(indexSetStats.indices()).isEqualTo(2L); + assertThat(indexSetStats.size()).isNotZero(); + } + + @ContainerMatrixTest + public void waitForRedIndexReturnsStatus() { + final HealthStatus healthStatus = indices.waitForRecovery("this_index_does_not_exist", 0); + + assertThat(healthStatus).isEqualTo(HealthStatus.Red); + } + + @ContainerMatrixTest + public void numberOfMessagesReturnsCorrectSize() { + importFixture("org/graylog2/indexer/indices/IndicesIT.json"); + + assertThat(indices.numberOfMessages("graylog_0")).isEqualTo(10); + } + + @ContainerMatrixTest + public 
void optimizeIndexJobDoesNotThrowException() { + importFixture("org/graylog2/indexer/indices/IndicesIT.json"); + + indices.optimizeIndex("graylog_0", 1, Duration.minutes(1)); + } + + @ContainerMatrixTest + public void aliasTargetReturnsListOfTargetsGivenAliasIsPointingToWithWildcards() { + final String index = createRandomIndex("indices_it_"); + final String alias = "graylog_alias_target"; + assertThat(indices.aliasTarget(alias)).isEmpty(); + + client().addAliasMapping(index, alias); + + assertThat(indices.aliasTarget("graylog_alias_*")).contains(index); + } + + @ContainerMatrixTest + public void aliasTargetSupportsIndicesWithPlusInName() { + final String prefixWithPlus = "index+set_"; + final String index = createRandomIndex(prefixWithPlus); + final String alias = prefixWithPlus + "deflector"; + assertThat(indices.aliasTarget(alias)).isEmpty(); + + client().addAliasMapping(index, alias); + + assertThat(indices.aliasTarget(prefixWithPlus + "*")).contains(index); + } + + @ContainerMatrixTest + public void removeAliasesRemovesSecondTarget() { + final String randomIndices = "random_"; + final String index = createRandomIndex(randomIndices); + final String index2 = createRandomIndex(randomIndices); + final String alias = randomIndices + "deflector"; + assertThat(indices.aliasTarget(alias)).isEmpty(); + + client().addAliasMapping(index, alias); + client().addAliasMapping(index2, alias); + + assertThatThrownBy(() -> indices.aliasTarget(alias)) + .isInstanceOf(TooManyAliasesException.class); + + indices.removeAliases(alias, Collections.singleton(index)); + + assertThat(indices.aliasTarget(alias)).contains(index2); + } + + // Prevent accidental use of AliasActions.Type.REMOVE_INDEX, + // as despite being an *Alias* Action, it actually deletes an index! + @ContainerMatrixTest + public void cyclingAliasLeavesOldIndexInPlace() { + final String deflector = "indices_it_deflector"; + + final String index1 = createRandomIndex("indices_it_"); + final String index2 = createRandomIndex("indices_it_"); + + client().addAliasMapping(index1, deflector); + + indices.cycleAlias(deflector, index2, index1); + + assertThat(indices.exists(index1)).isTrue(); + } + + @ContainerMatrixTest + public void getIndexShardsInfo() { + client().createIndex("1shard1replica", 1, 1); + List shardsInfo = indices.getShardsInfo("1shard1replica"); + assertThat(shardsInfo.size()).isEqualTo(2); + assertThat(shardsInfo.stream() + .filter(info -> info.shardType() == ShardsInfo.ShardType.PRIMARY) + .findFirst()) + .isNotEmpty(); + assertThat(shardsInfo.stream() + .filter(info -> info.shardType() == ShardsInfo.ShardType.REPLICA) + .findFirst()) + .isNotEmpty(); + } + + public static final class IndicesEventListener { + final List indicesClosedEvents = Collections.synchronizedList(new ArrayList<>()); + final List indicesDeletedEvents = Collections.synchronizedList(new ArrayList<>()); + final List indicesReopenedEvents = Collections.synchronizedList(new ArrayList<>()); + + @Subscribe + @SuppressWarnings("unused") + public void handleIndicesClosedEvent(IndicesClosedEvent event) { + indicesClosedEvents.add(event); + } + + @Subscribe + @SuppressWarnings("unused") + public void handleIndicesDeletedEvent(IndicesDeletedEvent event) { + indicesDeletedEvents.add(event); + } + + @Subscribe + @SuppressWarnings("unused") + public void handleIndicesReopenedEvent(IndicesReopenedEvent event) { + indicesReopenedEvents.add(event); + } + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/indexer/indices/IndicesWithComposableIndexTemplatesIT.java 
b/full-backend-tests/src/test/java/org/graylog2/indexer/indices/IndicesWithComposableIndexTemplatesIT.java new file mode 100644 index 000000000000..509419f28fda --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/indexer/indices/IndicesWithComposableIndexTemplatesIT.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog2.indexer.indices; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.elasticsearch.SearchServerInstance; + +import static org.graylog2.indexer.Constants.COMPOSABLE_INDEX_TEMPLATES_FEATURE; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS, enabledFeatureFlags = COMPOSABLE_INDEX_TEMPLATES_FEATURE) +public class IndicesWithComposableIndexTemplatesIT extends IndicesIT { + public IndicesWithComposableIndexTemplatesIT(SearchServerInstance elasticsearch) { + super(elasticsearch); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/inputs/InputCreationIT.java b/full-backend-tests/src/test/java/org/graylog2/inputs/InputCreationIT.java new file mode 100644 index 000000000000..7ae68c2aa84e --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/inputs/InputCreationIT.java @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog2.inputs; + +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS) +public class InputCreationIT { + + private final GraylogApis apis; + + public InputCreationIT(GraylogApis apis) { + this.apis = apis; + } + + @ContainerMatrixTest + void testHttpRandomInputCreation() { + String inputId = apis.inputs().createGlobalInput("testInput", + "org.graylog2.inputs.random.FakeHttpMessageInput", + Map.of("sleep", 30, + "sleep_deviation", 30, + "source", "example.org")); + apis.inputs().setRunningState(inputId); + apis.inputs().getInput(inputId) + .assertThat().body("title", equalTo("testInput")); + apis.waitFor(() -> + apis.inputs().getInputState(inputId) + .extract().body().jsonPath().get("state") + .equals("RUNNING"), + "Timed out waiting for HTTP Random Message Input to become available"); + apis.inputs().deleteInput(inputId); + } + + /** + * Test to make sure configuration encryption serialization/deserialization works + */ + @ContainerMatrixTest + void testFailingAwsCloudTrailInputCreation() { + String inputId = apis.inputs().createGlobalInput("testInput", + "org.graylog.aws.inputs.cloudtrail.CloudTrailInput", + Map.of("aws_sqs_region", "us-east-1", + "aws_s3_region", "us-east-1", + "aws_sqs_queue_name", "invalid-queue-no-messages-read", + "aws_access_key", "invalid-access-key", + "aws_secret_key", "invalid-secret-key")); + apis.inputs().setRunningState(inputId); + apis.inputs().getInput(inputId) + .assertThat().body("attributes.aws_access_key", equalTo("invalid-access-key")); + apis.waitFor(() -> + apis.inputs().getInputState(inputId) + .extract().body().jsonPath().get("state") + .equals("FAILING"), + "Timed out waiting for AWS CloudTrail Input to reach failing state"); + apis.inputs().deleteInput(inputId); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/periodical/IndexRangesCleanUpIT.java b/full-backend-tests/src/test/java/org/graylog2/periodical/IndexRangesCleanUpIT.java new file mode 100644 index 000000000000..5ffea5e78252 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/periodical/IndexRangesCleanUpIT.java @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
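Both InputCreationIT tests above follow the same create input, wait for state, delete sequence; a sketch of a shared helper built only from the GraylogApis calls they already use (illustrative, not part of this change) could look like this.

// Illustrative helper, not part of this change: create a global input, wait until it
// reaches the expected lifecycle state, and return its id. The Map value type and the
// two-argument apis.waitFor() overload mirror the calls in InputCreationIT above.
import org.graylog.testing.completebackend.apis.GraylogApis;

import java.util.Map;

final class InputTestHelper {
    private InputTestHelper() {}

    static String createInputAndWaitForState(GraylogApis apis,
                                             String title,
                                             String type,
                                             Map<String, Object> configuration,
                                             String expectedState) {
        final String inputId = apis.inputs().createGlobalInput(title, type, configuration);
        apis.inputs().setRunningState(inputId);
        apis.waitFor(() -> apis.inputs().getInputState(inputId)
                        .extract().body().jsonPath().get("state")
                        .equals(expectedState),
                "Timed out waiting for input '" + title + "' to reach state " + expectedState);
        return inputId;
    }
}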
+ */ +package org.graylog2.periodical; + +import com.github.rholder.retry.RetryException; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static org.assertj.core.api.Assertions.assertThat; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS) +public class IndexRangesCleanUpIT { + public static final String RANGE_CLEANUP_PREFIX = "range-cleanup"; + public static final String INDEX_TWO = RANGE_CLEANUP_PREFIX + "_1"; + public static final String INDEX_ONE = RANGE_CLEANUP_PREFIX + "_0"; + private final GraylogApis api; + + public IndexRangesCleanUpIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void testCleanUp() throws ExecutionException, RetryException { + String indexSetId = api.indices().createIndexSet("Range clean up", "test index range clean up", RANGE_CLEANUP_PREFIX); + + //Rotate to create indices 0 & 1 + api.indices().rotateIndexSet(indexSetId); + api.indices().rotateIndexSet(indexSetId); + + assertThat(getIndexRangesList()).isNotEmpty().contains(INDEX_ONE, INDEX_TWO); + + //Deleting index should automatically remove the range + api.indices().deleteIndex(INDEX_ONE); + + assertThat(getIndexRangesList()).isNotEmpty().doesNotContain(INDEX_ONE); + + //Deleting index set without deleting underlying indices + api.indices().deleteIndexSet(indexSetId, false); + assertThat(getIndexRangesList()).isNotEmpty().contains(INDEX_TWO); + + //Trigger clean up periodical over api + api.indices().rebuildIndexRanges(); + assertThat(getIndexRangesList()).isNotEmpty().doesNotContain(INDEX_TWO); + } + + private List getIndexRangesList() throws ExecutionException, RetryException { + return RetryerBuilder.>newBuilder() + .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS)) + .withStopStrategy(StopStrategies.stopAfterAttempt(3)) + .retryIfResult(List::isEmpty) + .build() + .call(() -> api.indices().listIndexRanges().properJSONPath().read("ranges.*.index_name")); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/streams/AssignStreamsToIndexSetIT.java b/full-backend-tests/src/test/java/org/graylog2/streams/AssignStreamsToIndexSetIT.java new file mode 100644 index 000000000000..5531ab05e356 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/streams/AssignStreamsToIndexSetIT.java @@ -0,0 +1,118 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
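IndexRangesCleanUpIT retries an eventually-consistent lookup with guava-retrying; spelled out with its generic type parameter, the pattern looks like this (a sketch mirroring the call above, not part of this change).

// Illustrative sketch of the retry pattern used above: poll a lookup until it returns
// a non-empty list, waiting one second between attempts and giving up after three tries.
import com.github.rholder.retry.RetryException;
import com.github.rholder.retry.RetryerBuilder;
import com.github.rholder.retry.StopStrategies;
import com.github.rholder.retry.WaitStrategies;

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

final class RetryExample {
    private RetryExample() {}

    static List<String> fetchWithRetry(Callable<List<String>> lookup) throws ExecutionException, RetryException {
        return RetryerBuilder.<List<String>>newBuilder()
                .withWaitStrategy(WaitStrategies.fixedWait(1, TimeUnit.SECONDS))
                .withStopStrategy(StopStrategies.stopAfterAttempt(3))
                .retryIfResult(List::isEmpty)
                .build()
                .call(lookup);
    }
}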
+ */ +package org.graylog2.streams; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.MongodbServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog.testing.utils.IndexSetUtils; +import org.graylog.testing.utils.StreamUtils; +import org.junit.jupiter.api.BeforeAll; + +import java.util.Collection; +import java.util.List; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.equalTo; + +@ContainerMatrixTestsConfiguration +public class AssignStreamsToIndexSetIT { + private static final String STREAMS_RESOURCE = "/streams"; + + private final GraylogApis api; + + public AssignStreamsToIndexSetIT(GraylogApis api) { + this.api = api; + } + + private String defaultIndexSetId; + private String newIndexSetId; + private String stream1Id; + private String stream2Id; + private String stream3Id; + + @BeforeAll + void beforeAll() { + this.defaultIndexSetId = IndexSetUtils.defaultIndexSetId(api.requestSpecificationSupplier()); + this.stream1Id = StreamUtils.createStream(api.requestSpecificationSupplier(), "New Stream", defaultIndexSetId); + this.stream2Id = StreamUtils.createStream(api.requestSpecificationSupplier(), "New Stream 2", defaultIndexSetId); + this.stream3Id = StreamUtils.createStream(api.requestSpecificationSupplier(), "New Stream 3", defaultIndexSetId); + this.newIndexSetId = IndexSetUtils.createIndexSet(api.requestSpecificationSupplier(), "Test Indices", "Some test indices", "test"); + } + + @ContainerMatrixTest + void assignStreamsToIndexSet() { + assignToIndexSet(List.of(stream1Id, stream2Id, stream3Id), newIndexSetId) + .statusCode(200); + + StreamUtils.getStream(api.requestSpecificationSupplier(), stream1Id) + .assertThat().body("index_set_id", equalTo(newIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream2Id) + .assertThat().body("index_set_id", equalTo(newIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream3Id) + .assertThat().body("index_set_id", equalTo(newIndexSetId)); + + assignToIndexSet(List.of(stream1Id, stream2Id, stream3Id), defaultIndexSetId) + .statusCode(200); + + StreamUtils.getStream(api.requestSpecificationSupplier(), stream1Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream2Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream3Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + } + + @ContainerMatrixTest + void assignStreamsToMissingIndexSet() { + assignToIndexSet(List.of(stream1Id, stream2Id, stream3Id), "doesnotexist") + .statusCode(404); + + StreamUtils.getStream(api.requestSpecificationSupplier(), stream1Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream2Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream3Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + } + + @ContainerMatrixTest + void assignMissingStreamToIndexSet() { + assignToIndexSet(List.of(stream1Id, stream2Id, stream3Id, "6389c6a9205a90634f992bce"), newIndexSetId) + .statusCode(404); + + 
StreamUtils.getStream(api.requestSpecificationSupplier(), stream1Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream2Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + StreamUtils.getStream(api.requestSpecificationSupplier(), stream3Id) + .assertThat().body("index_set_id", equalTo(defaultIndexSetId)); + } + + private ValidatableResponse assignToIndexSet(Collection streamIds, String indexSetId) { + return given() + .spec(api.requestSpecificationSupplier().get()) + .log().ifValidationFails() + .when() + .body(streamIds) + .put(STREAMS_RESOURCE + "/indexSet/" + indexSetId) + .then() + .log().ifValidationFails(); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/streams/StreamsIT.java b/full-backend-tests/src/test/java/org/graylog2/streams/StreamsIT.java new file mode 100644 index 000000000000..c55d50b5d319 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/streams/StreamsIT.java @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog2.streams; + +import io.restassured.response.ValidatableResponse; +import org.graylog.testing.completebackend.Lifecycle; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; +import org.graylog2.rest.bulk.model.BulkOperationRequest; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; + +import java.util.ArrayList; +import java.util.List; + +import static io.restassured.RestAssured.given; +import static org.graylog2.rest.models.tools.responses.PageListResponse.ELEMENTS_FIELD_NAME; +import static org.hamcrest.Matchers.equalTo; + +@ContainerMatrixTestsConfiguration(serverLifecycle = Lifecycle.CLASS) +public class StreamsIT { + private static final String STREAMS_RESOURCE = "/streams"; + + private final GraylogApis api; + private final List createdStreamsIds; + private final List createdIndexSetIds; + + public StreamsIT(GraylogApis api) { + this.api = api; + this.createdStreamsIds = new ArrayList<>(); + this.createdIndexSetIds = new ArrayList<>(); + } + + @BeforeAll + void beforeAll() { + final String defaultIndexSetId = api.indices().defaultIndexSetId(); + final String newIndexSetId = api.indices().createIndexSet("Test Indices", "Some test indices", "streamstest"); + final String newIndexSetId2 = api.indices().createIndexSet("More Test Indices", "Some more test indices", "moretest"); + this.createdIndexSetIds.add(newIndexSetId); + this.createdIndexSetIds.add(newIndexSetId2); + createdStreamsIds.add(api.streams().createStream("New Stream", newIndexSetId)); + createdStreamsIds.add(api.streams().createStream("New Stream 2", defaultIndexSetId)); + createdStreamsIds.add(api.streams().createStream("New Stream 3", newIndexSetId2)); + + 
createdStreamsIds.add(api.streams().createStream("sorttest: aaaaa", defaultIndexSetId)); + createdStreamsIds.add(api.streams().createStream("sorttest: ZZZZZZ", defaultIndexSetId, false)); + createdStreamsIds.add(api.streams().createStream("sorttest: 12345", defaultIndexSetId, false)); + } + + @AfterAll + void afterAll() { + createdStreamsIds.forEach(streamId -> api.streams().deleteStream(streamId)); + createdIndexSetIds.forEach(indexSetId -> api.indices().deleteIndexSet(indexSetId, true)); + } + + @ContainerMatrixTest + void bulkPauseAndResumeWorksCorrectly() { + // Pause and resume are covered in one test: another test checks sorting by status, so the original stream state is restored before this test finishes. + + // Pick "New Stream" and "sorttest: aaaaa" for the test, and add one invalid ID. + final List<String> bulkEntityIds = List.of( + createdStreamsIds.get(0), + createdStreamsIds.get(3), + "wrong ID!"); + + // test bulk pause + given() + .spec(api.requestSpecification()) + .log().ifValidationFails() + .when() + .body(new BulkOperationRequest(bulkEntityIds)) + .post(STREAMS_RESOURCE + "/bulk_pause") + .then() + .log().ifValidationFails() + .assertThat() + .statusCode(200) + .body("successfully_performed", equalTo(2)) + .body("failures[0].entity_id", equalTo("wrong ID!")); + + api.streams().getStream(createdStreamsIds.get(0)).body("disabled", equalTo(true)); + api.streams().getStream(createdStreamsIds.get(3)).body("disabled", equalTo(true)); + + // test bulk resume + given() + .spec(api.requestSpecification()) + .log().ifValidationFails() + .when() + .body(new BulkOperationRequest(bulkEntityIds)) + .post(STREAMS_RESOURCE + "/bulk_resume") + .then() + .log().ifValidationFails() + .assertThat() + .statusCode(200) + .body("successfully_performed", equalTo(2)) + .body("failures[0].entity_id", equalTo("wrong ID!")); + + api.streams().getStream(createdStreamsIds.get(0)).body("disabled", equalTo(false)); + api.streams().getStream(createdStreamsIds.get(3)).body("disabled", equalTo(false)); + } + + @ContainerMatrixTest + void sortByIndexSetTitle() { + paginatedByFieldWithOrder("New", "title", "asc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("New Stream", "New Stream 2", "New Stream 3"))); + paginatedByFieldWithOrder("New", "title", "desc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("New Stream 3", "New Stream 2", "New Stream"))); + paginatedByFieldWithOrder("New", "index_set_title", "asc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("New Stream 2", "New Stream 3", "New Stream"))); + paginatedByFieldWithOrder("New", "index_set_title", "desc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("New Stream", "New Stream 3", "New Stream 2"))); + } + + @ContainerMatrixTest + void sortByTitleCaseInsensitive() { + paginatedByFieldWithOrder("sorttest", "title", "asc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("sorttest: 12345", "sorttest: aaaaa", "sorttest: ZZZZZZ"))); + paginatedByFieldWithOrder("sorttest", "title", "desc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("sorttest: ZZZZZZ", "sorttest: aaaaa", "sorttest: 12345"))); + } + + @ContainerMatrixTest + void sortByStatus() { + paginatedByFieldWithOrder("sorttest", "disabled", "asc") + .assertThat() + .body(ELEMENTS_FIELD_NAME + "*.title", equalTo(List.of("sorttest: aaaaa", "sorttest: ZZZZZZ", "sorttest: 12345"))); + paginatedByFieldWithOrder("sorttest", "disabled", "desc") + .assertThat() + .body(ELEMENTS_FIELD_NAME
+ "*.title", equalTo(List.of("sorttest: ZZZZZZ", "sorttest: 12345", "sorttest: aaaaa"))); + } + + private ValidatableResponse paginatedByFieldWithOrder(String query, String field, String order) { + return given() + .spec(api.requestSpecification()) + .log().ifValidationFails() + .when() + .queryParam("query", query) + .queryParam("sort", field) + .queryParam("order", order) + .get(STREAMS_RESOURCE + "/paginated") + .then() + .log().ifValidationFails(); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/suggestions/EntitySuggestionsIT.java b/full-backend-tests/src/test/java/org/graylog2/suggestions/EntitySuggestionsIT.java new file mode 100644 index 000000000000..04c968421b3b --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/suggestions/EntitySuggestionsIT.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog2.suggestions; + +import io.restassured.response.ValidatableResponse; +import org.apache.commons.lang3.RandomStringUtils; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +@ContainerMatrixTestsConfiguration(searchVersions = SearchServer.OS2_LATEST) +public class EntitySuggestionsIT { + private final GraylogApis api; + + public EntitySuggestionsIT(GraylogApis api) { + this.api = api; + } + + @ContainerMatrixTest + void returnsTitlesForDashboards() { + final var randomIdentifier = RandomStringUtils.randomAlphanumeric(8); + final var dashboard1 = api.dashboards().createDashboard("First " + randomIdentifier); + final var dashboard2 = api.dashboards().createDashboard("Second " + randomIdentifier); + final var dashboard3 = api.dashboards().createDashboard("Third " + randomIdentifier); + retrieveSuggestions(1, 100, randomIdentifier) + .body("pagination.total", equalTo(3)) + .body("pagination.count", equalTo(3)) + .body("suggestions[0].value", equalTo("First " + randomIdentifier)) + .body("suggestions[0].id", equalTo(dashboard1)) + .body("suggestions[1].value", equalTo("Second " + randomIdentifier)) + .body("suggestions[1].id", equalTo(dashboard2)) + .body("suggestions[2].value", equalTo("Third " + randomIdentifier)) + .body("suggestions[2].id", equalTo(dashboard3)); + retrieveSuggestions(1, 1, randomIdentifier) + .body("pagination.total", equalTo(3)) + .body("pagination.count", equalTo(1)) + .body("suggestions[0].value", equalTo("First " + randomIdentifier)) + .body("suggestions[0].id", equalTo(dashboard1)); + retrieveSuggestions(2, 1, randomIdentifier) + .body("pagination.total", equalTo(3)) + .body("pagination.count", equalTo(1)) + .body("suggestions[0].value", equalTo("Second " + randomIdentifier)) + 
.body("suggestions[0].id", equalTo(dashboard2)); + retrieveSuggestions(3, 1, randomIdentifier) + .body("pagination.total", equalTo(3)) + .body("pagination.count", equalTo(1)) + .body("suggestions[0].value", equalTo("Third " + randomIdentifier)) + .body("suggestions[0].id", equalTo(dashboard3)); + retrieveSuggestions(4, 1, randomIdentifier) + .body("pagination.total", equalTo(3)) + .body("pagination.count", equalTo(0)) + .body("suggestions[0]", nullValue()); + } + + private ValidatableResponse retrieveSuggestions(int page, int perPage, String query) { + return given() + .spec(api.requestSpecification()) + .log().ifValidationFails() + .when() + .get("/entity_suggestions?page=" + page + "&per_page=" + perPage + "&collection=dashboards&column=title&query=" + query) + .then() + .log().ifValidationFails() + .assertThat() + .statusCode(200); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceBase.java b/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceBase.java new file mode 100644 index 000000000000..df481f380b89 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceBase.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog2.web.resources; + +import io.restassured.http.ContentType; +import io.restassured.specification.RequestSpecification; +import org.apache.http.HttpStatus; +import org.graylog.testing.completebackend.apis.GraylogApis; + +import static io.restassured.RestAssured.given; + +public abstract class WebInterfaceAssetsResourceBase { + private final GraylogApis apis; + + protected WebInterfaceAssetsResourceBase(GraylogApis apis) { + this.apis = apis; + } + + private RequestSpecification backend() { + return given() + .baseUri(apis.backend().uri()) + .port(apis.backend().apiPort()); + } + + protected void testFrontend(String prefix) { + final var scriptSrcs = backend() + .get(prefix) + .then() + .assertThat() + .statusCode(HttpStatus.SC_OK) + .contentType(ContentType.HTML) + .extract() + .htmlPath() + .getList("html.body.script*.@src"); + + scriptSrcs.forEach(src -> { + backend() + .get(src) + .then() + .assertThat() + .statusCode(HttpStatus.SC_OK) + .contentType(ContentType.JSON); + }); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceIT.java b/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceIT.java new file mode 100644 index 000000000000..f0712fdee5a9 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceIT.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog2.web.resources; + +import org.graylog.testing.completebackend.MavenProjectDirProviderWithFrontend; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +@ContainerMatrixTestsConfiguration(mavenProjectDirProvider = MavenProjectDirProviderWithFrontend.class, + searchVersions = {SearchServer.DATANODE_DEV}) +public class WebInterfaceAssetsResourceIT extends WebInterfaceAssetsResourceBase { + public WebInterfaceAssetsResourceIT(GraylogApis graylogApis) { + super(graylogApis); + } + + @ContainerMatrixTest + void testIndexHtml() { + testFrontend("/"); + } +} diff --git a/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceWithPrefixIT.java b/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceWithPrefixIT.java new file mode 100644 index 000000000000..41fb247cc148 --- /dev/null +++ b/full-backend-tests/src/test/java/org/graylog2/web/resources/WebInterfaceAssetsResourceWithPrefixIT.java @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog2.web.resources; + +import org.graylog.testing.completebackend.MavenProjectDirProviderWithFrontend; +import org.graylog.testing.completebackend.apis.GraylogApis; +import org.graylog.testing.containermatrix.SearchServer; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTest; +import org.graylog.testing.containermatrix.annotations.ContainerMatrixTestsConfiguration; + +@ContainerMatrixTestsConfiguration(mavenProjectDirProvider = MavenProjectDirProviderWithFrontend.class, + searchVersions = {SearchServer.DATANODE_DEV}, + additionalConfigurationParameters = { + @ContainerMatrixTestsConfiguration.ConfigurationParameter(key = "GRAYLOG_HTTP_PUBLISH_URI", value = "http://localhost:9000/graylog") + }) +public class WebInterfaceAssetsResourceWithPrefixIT extends WebInterfaceAssetsResourceBase { + public WebInterfaceAssetsResourceWithPrefixIT(GraylogApis graylogApis) { + super(graylogApis); + } + + @ContainerMatrixTest + void testIndexHtml() { + testFrontend("/graylog/"); + } +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/aggregations/messages-for-missing-aggregation-check.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/aggregations/messages-for-missing-aggregation-check.json new file mode 100644 index 000000000000..d73aab5550ea --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/aggregations/messages-for-missing-aggregation-check.json @@ -0,0 +1,127 @@ +{ + "documents": [ + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "0" + } + }, + { + "data": { + "source": "fbi.org", + "message": "Personal data added", + "timestamp": "2022-01-01 01:00:00.000", + "fixtureType": "474877", + "firstName": "Joe", + "lastName": "Smith", + "age": 50, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "1" + } + }, + { + "data": { + "source": "fbi.org", + "message": "Personal data added", + "timestamp": "2022-01-01 01:00:00.000", + "fixtureType": "474877", + "firstName": "Jane", + "lastName": "Smith", + "age": 40, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "2" + } + }, + { + "data": { + "source": "fbi.org", + "message": "Personal data added", + "timestamp": "2022-01-01 01:00:00.000", + "fixtureType": "474877", + "firstName": "Joe", + "lastName": "Biden", + "age": 80, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "3" + } + }, + { + "data": { + "source": "fbi.org", + "message": "Personal data added", + "timestamp": "2022-01-01 01:00:00.000", + "fixtureType": "474877", + "lastName": "Cooper", + "age": 60, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "4" + } + }, + { + "data": { + "source": "fbi.org", + "message": "Personal data added", + "timestamp": "2022-01-01 01:00:00.000", + "fixtureType": "474877", + "firstName": "Bob", + "age": 60, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/aggregations/random-http-logs.json 
b/full-backend-tests/src/test/resources/org/graylog/plugins/views/aggregations/random-http-logs.json new file mode 100644 index 000000000000..ef65a416b26d --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/aggregations/random-http-logs.json @@ -0,0 +1,27006 @@ +{ + "documents": [ + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 0 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:19.998Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:19.998", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ], + "test_boolean": true + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 1 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.024Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:20.024", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ], + "test_boolean": false + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 2 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.050Z GET /login [200] 46ms", + "timestamp": "2022-09-26 14:12:20.050", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 3 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.078Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:20.078", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 4 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.097Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:20.097", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 5 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.122Z GET /posts/45326 [200] 65ms", + "timestamp": "2022-09-26 14:12:20.122", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 6 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:20.152Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:20.152", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 7 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.176Z GET /login [200] 42ms", + "timestamp": "2022-09-26 14:12:20.176", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 8 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.196Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:20.196", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 9 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.215Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:20.215", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 10 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.238Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:20.238", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 11 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.266Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:20.266", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 12 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.290Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:20.290", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 13 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.319Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:20.319", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": 
[ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 14 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "DELETE", + "took_ms": 66, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.348Z DELETE /login [204] 66ms", + "timestamp": "2022-09-26 14:12:20.348", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 15 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.372Z GET /posts/45326 [200] 63ms", + "timestamp": "2022-09-26 14:12:20.372", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 16 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 141, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.394Z POST /posts [201] 141ms", + "timestamp": "2022-09-26 14:12:20.394", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 17 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.416Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:20.416", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 18 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.447Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:20.447", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 19 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 73, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.473Z PUT /posts/45326 [200] 73ms", + "timestamp": "2022-09-26 14:12:20.473", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 20 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 103, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.500Z DELETE /posts [204] 103ms", + "timestamp": "2022-09-26 14:12:20.500", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 21 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": 
"GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.522Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:20.522", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 22 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.548Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:20.548", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 23 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.568Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:20.568", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 24 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 84, + "user_id": 54351, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.595Z DELETE /posts/45326 [204] 84ms", + "timestamp": "2022-09-26 14:12:20.595", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 25 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.623Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:20.623", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 26 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.642Z GET /posts [200] 65ms", + "timestamp": "2022-09-26 14:12:20.642", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 27 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.670Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:20.670", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 28 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "POST", + "took_ms": 147, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.690Z POST /users [201] 147ms", + "timestamp": 
"2022-09-26 14:12:20.690", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 29 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.717Z GET /posts/45326 [200] 36ms", + "timestamp": "2022-09-26 14:12:20.717", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 30 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.736Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:20.736", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 31 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.766Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:20.766", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 32 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.795Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:20.795", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 33 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.826Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:20.826", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 34 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.846Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:20.846", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 35 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.874Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:20.874", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + 
"indexType": "message", + "indexId": 36 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.892Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:20.892", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 37 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 65, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.918Z GET /login [200] 65ms", + "timestamp": "2022-09-26 14:12:20.918", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 38 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.943Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:20.943", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 39 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.961Z GET /login [200] 37ms", + "timestamp": "2022-09-26 14:12:20.961", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 40 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:20.984Z GET /users [200] 62ms", + "timestamp": "2022-09-26 14:12:20.984", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 41 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 78, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.011Z DELETE /posts/45326 [204] 78ms", + "timestamp": "2022-09-26 14:12:21.011", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 42 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.040Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:21.040", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 43 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + 
"source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.067Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:21.067", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 44 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.088Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:21.088", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 45 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.111Z GET /posts/45326/edit [200] 39ms", + "timestamp": "2022-09-26 14:12:21.111", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 46 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 123, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.131Z POST /posts [201] 123ms", + "timestamp": "2022-09-26 14:12:21.131", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 47 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.160Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:21.160", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 48 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "PUT", + "took_ms": 92, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.180Z PUT /login [200] 92ms", + "timestamp": "2022-09-26 14:12:21.180", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 49 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.210Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:21.210", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 50 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.234Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:21.234", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 51 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.261Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:21.261", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 52 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.292Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:21.292", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 53 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.321Z GET /login [200] 60ms", + "timestamp": "2022-09-26 14:12:21.321", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 54 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.347Z GET /posts/45326/edit [200] 40ms", + "timestamp": "2022-09-26 14:12:21.347", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 55 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.379Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:21.379", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 56 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.401Z GET /login [200] 48ms", + "timestamp": "2022-09-26 14:12:21.401", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 57 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.425Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:21.425", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 58 + } + }, + { + 
"data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.444Z GET /posts/45326 [200] 56ms", + "timestamp": "2022-09-26 14:12:21.444", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 59 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.470Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:21.470", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 60 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.489Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:21.489", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 61 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.521Z GET /users [200] 58ms", + "timestamp": "2022-09-26 14:12:21.521", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 62 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.553Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:21.553", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 63 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.571Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:21.571", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 64 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.601Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:21.601", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 65 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.625Z 
GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:21.625", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 66 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.650Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:21.650", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 67 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.672Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:21.672", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 68 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.698Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:21.698", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 69 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.720Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:21.720", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 70 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.745Z GET /posts/45326 [200] 36ms", + "timestamp": "2022-09-26 14:12:21.745", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 71 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 71, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.768Z PUT /posts [200] 71ms", + "timestamp": "2022-09-26 14:12:21.768", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 72 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 104, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.789Z DELETE /posts [204] 104ms", + "timestamp": "2022-09-26 14:12:21.789", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + 
"index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 73 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.820Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:21.820", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 74 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.845Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:21.845", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 75 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.864Z GET /login [200] 65ms", + "timestamp": "2022-09-26 14:12:21.864", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 76 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.892Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:21.892", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 77 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.923Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:21.923", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 78 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.941Z GET /login [200] 36ms", + "timestamp": "2022-09-26 14:12:21.941", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 79 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.964Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:21.964", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 80 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 158, + 
"user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:21.988Z POST /posts [201] 158ms", + "timestamp": "2022-09-26 14:12:21.988", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 81 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.018Z GET /posts/45326 [200] 42ms", + "timestamp": "2022-09-26 14:12:22.018", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 82 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.045Z GET /posts/45326 [200] 52ms", + "timestamp": "2022-09-26 14:12:22.045", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 83 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 162, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.069Z POST /posts/45326 [201] 162ms", + "timestamp": "2022-09-26 14:12:22.069", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 84 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.095Z GET /posts/45326 [200] 37ms", + "timestamp": "2022-09-26 14:12:22.095", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 85 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.117Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:22.117", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 86 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.147Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:22.147", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 87 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.177Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:22.177", 
+ "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 88 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "DELETE", + "took_ms": 98, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.201Z DELETE /login [204] 98ms", + "timestamp": "2022-09-26 14:12:22.201", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 89 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 65, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.229Z DELETE /posts [204] 65ms", + "timestamp": "2022-09-26 14:12:22.229", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 90 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.249Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:22.249", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 91 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.273Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:22.273", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 92 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 94, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.302Z DELETE /posts [204] 94ms", + "timestamp": "2022-09-26 14:12:22.302", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 93 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.332Z GET /login [200] 61ms", + "timestamp": "2022-09-26 14:12:22.332", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 94 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.356Z GET /posts/45326 [500] 55ms", + "timestamp": "2022-09-26 14:12:22.356", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": 
"message", + "indexId": 95 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 98, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.381Z DELETE /posts [204] 98ms", + "timestamp": "2022-09-26 14:12:22.381", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 96 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 118, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.407Z PUT /posts [200] 118ms", + "timestamp": "2022-09-26 14:12:22.407", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 97 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 125, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.436Z PUT /posts [200] 125ms", + "timestamp": "2022-09-26 14:12:22.436", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 98 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.454Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:22.454", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 99 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.483Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:22.483", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 100 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.513Z GET /login [200] 59ms", + "timestamp": "2022-09-26 14:12:22.513", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 101 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.536Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:22.536", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 102 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": 
"pivot-fixtures", + "message": "2022-09-26T14:12:22.567Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:22.567", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 103 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 129, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.594Z POST /login [201] 129ms", + "timestamp": "2022-09-26 14:12:22.594", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 104 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 92, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.618Z PUT /posts [200] 92ms", + "timestamp": "2022-09-26 14:12:22.618", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 105 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.641Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:22.641", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 106 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 58, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.667Z GET /users [200] 58ms", + "timestamp": "2022-09-26 14:12:22.667", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 107 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 123, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.697Z POST /posts [201] 123ms", + "timestamp": "2022-09-26 14:12:22.697", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 108 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.725Z GET /login [200] 36ms", + "timestamp": "2022-09-26 14:12:22.725", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 109 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.745Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:22.745", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 110 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.770Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:22.770", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 111 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.799Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:22.799", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 112 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.827Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:22.827", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 113 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.854Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:22.854", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 114 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.879Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:22.879", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 115 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.899Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:22.899", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 116 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.921Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:22.921", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 117 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.945Z GET /posts/45326 [200] 64ms", + "timestamp": "2022-09-26 14:12:22.945", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 118 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:22.976Z GET /posts/45326 [200] 56ms", + "timestamp": "2022-09-26 14:12:22.976", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 119 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.003Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:23.003", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 120 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.027Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:23.027", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 121 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.050Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:23.050", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 122 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.076Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:23.076", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 123 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 86, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.102Z DELETE /posts [204] 86ms", + "timestamp": "2022-09-26 14:12:23.102", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 124 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:23.133Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:23.133", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 125 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.153Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:23.153", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 126 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.180Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:23.180", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 127 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.206Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:23.206", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 128 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.236Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:23.236", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 129 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 173, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.257Z POST /posts/45326 [201] 173ms", + "timestamp": "2022-09-26 14:12:23.257", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 130 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 36, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.281Z GET /login [200] 36ms", + "timestamp": "2022-09-26 14:12:23.281", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 131 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.302Z GET /login [200] 55ms", + "timestamp": "2022-09-26 14:12:23.302", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + 
] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 132 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "DELETE", + "took_ms": 83, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.331Z DELETE /login [204] 83ms", + "timestamp": "2022-09-26 14:12:23.331", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 133 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.360Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:23.360", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 134 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.391Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:23.391", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 135 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.419Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:23.419", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 136 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.445Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:23.445", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 137 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.473Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:23.473", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 138 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.504Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:23.504", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 139 + } + }, + { + "data": { + "controller": "PostsController", + 
"http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.525Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:23.525", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 140 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.555Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:23.555", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 141 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.581Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:23.581", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 142 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 44, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.612Z GET /users [200] 44ms", + "timestamp": "2022-09-26 14:12:23.612", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 143 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.638Z GET /login [200] 43ms", + "timestamp": "2022-09-26 14:12:23.638", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 144 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.665Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:23.665", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 145 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 164, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.685Z POST /posts [201] 164ms", + "timestamp": "2022-09-26 14:12:23.685", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 146 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.703Z GET /posts [200] 55ms", + 
"timestamp": "2022-09-26 14:12:23.703", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 147 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.723Z GET /posts/45326/edit [500] 46ms", + "timestamp": "2022-09-26 14:12:23.723", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 148 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.754Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:23.754", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 149 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.782Z GET /login [200] 37ms", + "timestamp": "2022-09-26 14:12:23.782", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 150 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.800Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:23.800", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 151 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.822Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:23.822", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 152 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.850Z GET /login [200] 58ms", + "timestamp": "2022-09-26 14:12:23.850", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 153 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 63, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.877Z DELETE /posts [204] 63ms", + "timestamp": "2022-09-26 14:12:23.877", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + 
"index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 154 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 111, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.902Z PUT /posts [200] 111ms", + "timestamp": "2022-09-26 14:12:23.902", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 155 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.931Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:23.931", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 156 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.961Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:23.961", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 157 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:23.982Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:23.982", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 158 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.002Z GET /login [200] 45ms", + "timestamp": "2022-09-26 14:12:24.002", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 159 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.028Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:24.028", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 160 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.056Z GET /posts/45326 [200] 42ms", + "timestamp": "2022-09-26 14:12:24.056", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 161 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, 
+ "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.078Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:24.078", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 162 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.106Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:24.106", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 163 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.128Z GET /posts/45326 [200] 58ms", + "timestamp": "2022-09-26 14:12:24.128", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 164 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.155Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:24.155", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 165 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.174Z DELETE /posts [204] 63ms", + "timestamp": "2022-09-26 14:12:24.174", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 166 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.199Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:24.199", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 167 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.224Z GET /users [200] 44ms", + "timestamp": "2022-09-26 14:12:24.224", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 168 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.251Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:24.251", + 
"http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 169 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.271Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:24.271", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 170 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.301Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:24.301", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 171 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.332Z GET /posts/45326 [200] 58ms", + "timestamp": "2022-09-26 14:12:24.332", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 172 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.363Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:24.363", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 173 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.383Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:24.383", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 174 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 156, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.403Z POST /posts [201] 156ms", + "timestamp": "2022-09-26 14:12:24.403", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 175 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.432Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:24.432", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": 
"message", + "indexId": 176 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 75, + "user_id": 54351, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.459Z DELETE /posts/45326 [204] 75ms", + "timestamp": "2022-09-26 14:12:24.459", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 177 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.477Z GET /posts/45326/edit [200] 50ms", + "timestamp": "2022-09-26 14:12:24.477", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 178 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 428, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.504Z GET /posts/45326 [200] 428ms", + "timestamp": "2022-09-26 14:12:24.504", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 179 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.525Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:24.525", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 180 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.547Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:24.547", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 181 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.570Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:24.570", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 182 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 149, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.595Z POST /posts [201] 149ms", + "timestamp": "2022-09-26 14:12:24.595", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 183 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + 
"action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.614Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:24.614", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 184 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.632Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:24.632", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 185 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 59, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.663Z DELETE /posts/45326 [204] 59ms", + "timestamp": "2022-09-26 14:12:24.663", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 186 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.692Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:24.692", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 187 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.715Z GET /posts/45326 [200] 50ms", + "timestamp": "2022-09-26 14:12:24.715", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 188 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.734Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:24.734", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 189 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.762Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:24.762", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 190 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.793Z GET /posts/45326 [200] 36ms", + "timestamp": "2022-09-26 14:12:24.793", + 
"http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 191 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.824Z GET /login [200] 38ms", + "timestamp": "2022-09-26 14:12:24.824", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 192 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.854Z GET /posts/45326/edit [200] 51ms", + "timestamp": "2022-09-26 14:12:24.854", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 193 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.875Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:24.875", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 194 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.903Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:24.903", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 195 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.928Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:24.928", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 196 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.946Z GET /login [200] 45ms", + "timestamp": "2022-09-26 14:12:24.946", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 197 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:24.974Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:24.974", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": 
"message", + "indexId": 198 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.003Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:25.003", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 199 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.022Z GET /login [200] 52ms", + "timestamp": "2022-09-26 14:12:25.022", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 200 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 50, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.048Z GET /login [200] 50ms", + "timestamp": "2022-09-26 14:12:25.048", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 201 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.079Z GET /posts/45326 [200] 52ms", + "timestamp": "2022-09-26 14:12:25.079", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 202 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.110Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:25.110", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 203 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.134Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:25.134", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 204 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.161Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:25.161", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 205 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 68, + "user_id": 6469981, + "action": "index", + "source": 
"pivot-fixtures", + "message": "2022-09-26T14:12:25.194Z DELETE /posts [204] 68ms", + "timestamp": "2022-09-26 14:12:25.194", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 206 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.224Z GET /posts/45326 [200] 64ms", + "timestamp": "2022-09-26 14:12:25.224", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 207 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.249Z GET /login [200] 55ms", + "timestamp": "2022-09-26 14:12:25.249", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 208 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.279Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:25.279", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 209 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.301Z GET /users [200] 44ms", + "timestamp": "2022-09-26 14:12:25.301", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 210 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.331Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:25.331", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 211 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.356Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:25.356", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 212 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.375Z GET /posts/45326 [200] 55ms", + "timestamp": "2022-09-26 14:12:25.375", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 213 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.402Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:25.402", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 214 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.433Z GET /posts/45326 [200] 58ms", + "timestamp": "2022-09-26 14:12:25.433", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 215 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.461Z GET /posts/45326 [200] 56ms", + "timestamp": "2022-09-26 14:12:25.461", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 216 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 126, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.489Z POST /login [201] 126ms", + "timestamp": "2022-09-26 14:12:25.489", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 217 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.508Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:25.508", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 218 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.534Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:25.534", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 219 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.559Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:25.559", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 220 + } + }, + { + 
"data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.588Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:25.588", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 221 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.617Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:25.617", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 222 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.639Z GET /posts/45326/edit [200] 54ms", + "timestamp": "2022-09-26 14:12:25.639", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 223 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.659Z GET /posts/45326 [200] 63ms", + "timestamp": "2022-09-26 14:12:25.659", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 224 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.679Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:25.679", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 225 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.701Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:25.701", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 226 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.723Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:25.723", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 227 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:25.749Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:25.749", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 228 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.774Z DELETE /posts [204] 63ms", + "timestamp": "2022-09-26 14:12:25.774", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 229 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 128, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.799Z POST /posts [201] 128ms", + "timestamp": "2022-09-26 14:12:25.799", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 230 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 65, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.821Z DELETE /posts [204] 65ms", + "timestamp": "2022-09-26 14:12:25.821", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 231 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.843Z GET /posts/45326 [500] 36ms", + "timestamp": "2022-09-26 14:12:25.843", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 232 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.864Z GET /login [200] 59ms", + "timestamp": "2022-09-26 14:12:25.864", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 233 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.885Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:25.885", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 234 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.907Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:25.907", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } 
+ } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 235 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.926Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:25.926", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 236 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.946Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:25.946", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 237 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.971Z GET /posts/45326 [200] 63ms", + "timestamp": "2022-09-26 14:12:25.971", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 238 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:25.991Z GET /posts/45326 [200] 55ms", + "timestamp": "2022-09-26 14:12:25.991", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 239 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.014Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:26.014", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 240 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.036Z GET /login [200] 36ms", + "timestamp": "2022-09-26 14:12:26.036", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 241 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 161, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.066Z POST /posts [201] 161ms", + "timestamp": "2022-09-26 14:12:26.066", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 242 + } + }, + { + "data": { + "controller": 
"PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.095Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:26.095", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 243 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.124Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:26.124", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 244 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.145Z GET /users [200] 39ms", + "timestamp": "2022-09-26 14:12:26.145", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 245 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 126, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.174Z PUT /posts [200] 126ms", + "timestamp": "2022-09-26 14:12:26.174", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 246 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "POST", + "took_ms": 129, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.205Z POST /users [201] 129ms", + "timestamp": "2022-09-26 14:12:26.205", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 247 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.237Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:26.237", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 248 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.263Z GET /posts/45326 [200] 62ms", + "timestamp": "2022-09-26 14:12:26.263", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 249 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.292Z 
GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:26.292", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 250 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.323Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:26.323", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 251 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.344Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:26.344", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 252 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.363Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:26.363", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 253 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 144, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.381Z POST /posts [201] 144ms", + "timestamp": "2022-09-26 14:12:26.381", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 254 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.412Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:26.412", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 255 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.437Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:26.437", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 256 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "DELETE", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.456Z DELETE /users [204] 50ms", + "timestamp": "2022-09-26 14:12:26.456", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { 
+ "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 257 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.484Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:26.484", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 258 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.508Z GET /login [200] 49ms", + "timestamp": "2022-09-26 14:12:26.508", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 259 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.535Z GET /posts/45326 [200] 50ms", + "timestamp": "2022-09-26 14:12:26.535", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 260 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.561Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:26.561", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 261 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.588Z GET /login [200] 62ms", + "timestamp": "2022-09-26 14:12:26.588", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 262 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.606Z GET /login [200] 50ms", + "timestamp": "2022-09-26 14:12:26.606", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 263 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.629Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:26.629", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 264 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + 
"took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.656Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:26.656", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 265 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.685Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:26.685", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 266 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 104, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.705Z PUT /posts [200] 104ms", + "timestamp": "2022-09-26 14:12:26.705", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 267 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "DELETE", + "took_ms": 63, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.734Z DELETE /login [204] 63ms", + "timestamp": "2022-09-26 14:12:26.734", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 268 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.753Z GET /posts/45326 [200] 58ms", + "timestamp": "2022-09-26 14:12:26.753", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 269 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 62, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.776Z GET /login [200] 62ms", + "timestamp": "2022-09-26 14:12:26.776", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 270 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.796Z GET /login [200] 54ms", + "timestamp": "2022-09-26 14:12:26.796", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 271 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 101, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.827Z DELETE /posts/45326 [204] 101ms", + "timestamp": 
"2022-09-26 14:12:26.827", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 272 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 65, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.845Z GET /login [200] 65ms", + "timestamp": "2022-09-26 14:12:26.845", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 273 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.871Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:26.871", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 274 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.894Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:26.894", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 275 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.922Z GET /users [200] 57ms", + "timestamp": "2022-09-26 14:12:26.922", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 276 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 61, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.948Z GET /login [200] 61ms", + "timestamp": "2022-09-26 14:12:26.948", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 277 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.966Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:26.966", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 278 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:26.996Z GET /login [200] 59ms", + "timestamp": "2022-09-26 14:12:26.996", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + 
"indexType": "message", + "indexId": 279 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.017Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:27.017", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 280 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.040Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:27.040", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 281 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.067Z GET /posts/45326 [200] 40ms", + "timestamp": "2022-09-26 14:12:27.067", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 282 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.097Z GET /posts/45326/edit [200] 43ms", + "timestamp": "2022-09-26 14:12:27.097", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 283 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.116Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:27.116", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 284 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.147Z GET /posts/45326 [200] 62ms", + "timestamp": "2022-09-26 14:12:27.147", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 285 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.172Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:27.172", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 286 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 
6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.197Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:27.197", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 287 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.219Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:27.219", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 288 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 53, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.249Z DELETE /posts/45326 [204] 53ms", + "timestamp": "2022-09-26 14:12:27.249", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 289 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.278Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:27.278", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 290 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.303Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:27.303", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 291 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.325Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:27.325", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 292 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.356Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:27.356", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 293 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.379Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:27.379", + 
"http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 294 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 68, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.401Z DELETE /posts [204] 68ms", + "timestamp": "2022-09-26 14:12:27.401", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 295 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.428Z GET /posts/45326 [200] 42ms", + "timestamp": "2022-09-26 14:12:27.428", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 296 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.455Z GET /users [200] 59ms", + "timestamp": "2022-09-26 14:12:27.455", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 297 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.475Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:27.475", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 298 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.503Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:27.503", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 299 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.533Z GET /login [200] 55ms", + "timestamp": "2022-09-26 14:12:27.533", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 300 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.562Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:27.562", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": 
"message", + "indexId": 301 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.591Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:27.591", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 302 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 48, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.624Z GET /login [200] 48ms", + "timestamp": "2022-09-26 14:12:27.624", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 303 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.650Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:27.650", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 304 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.674Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:27.674", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 305 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.702Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:27.702", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 306 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.723Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:27.723", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 307 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.743Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:27.743", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 308 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "login", + "source": 
"pivot-fixtures", + "message": "2022-09-26T14:12:27.767Z GET /login [200] 38ms", + "timestamp": "2022-09-26 14:12:27.767", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 309 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.793Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:27.793", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 310 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.823Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:27.823", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 311 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.850Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:27.850", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 312 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.875Z GET /login [200] 54ms", + "timestamp": "2022-09-26 14:12:27.875", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 313 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.907Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:27.907", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 314 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.932Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:27.932", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 315 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 164, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.962Z POST /posts [201] 164ms", + "timestamp": "2022-09-26 14:12:27.962", + "http_response_code": 201, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 316 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:27.991Z GET /posts/45326 [200] 36ms", + "timestamp": "2022-09-26 14:12:27.991", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 317 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.016Z GET /posts/45326 [200] 55ms", + "timestamp": "2022-09-26 14:12:28.016", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 318 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.049Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:28.049", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 319 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.067Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:28.067", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 320 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.092Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:28.092", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 321 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.122Z GET /login [200] 56ms", + "timestamp": "2022-09-26 14:12:28.122", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 322 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.153Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:28.153", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 323 + } + }, + { + "data": 
{ + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.172Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:28.172", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 324 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.197Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:28.197", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 325 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 152, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.228Z POST /posts [201] 152ms", + "timestamp": "2022-09-26 14:12:28.228", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 326 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.256Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:28.256", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 327 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.282Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:28.282", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 328 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.313Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:28.313", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 329 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.331Z GET /posts/45326 [200] 40ms", + "timestamp": "2022-09-26 14:12:28.331", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 330 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:28.350Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:28.350", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 331 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.370Z DELETE /posts [204] 64ms", + "timestamp": "2022-09-26 14:12:28.370", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 332 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.393Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:28.393", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 333 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.420Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:28.420", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 334 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 52, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.446Z GET /users [200] 52ms", + "timestamp": "2022-09-26 14:12:28.446", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 335 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.469Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:28.469", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 336 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 101, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.493Z DELETE /posts/45326 [204] 101ms", + "timestamp": "2022-09-26 14:12:28.493", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 337 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.525Z GET /posts/45326 [200] 43ms", + "timestamp": "2022-09-26 14:12:28.525", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] 
+ } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 338 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 140, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.552Z POST /posts/45326 [201] 140ms", + "timestamp": "2022-09-26 14:12:28.552", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 339 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.582Z GET /posts/45326 [200] 65ms", + "timestamp": "2022-09-26 14:12:28.582", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 340 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.605Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:28.605", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 341 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.635Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:28.635", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 342 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.659Z GET /posts/45326/edit [200] 46ms", + "timestamp": "2022-09-26 14:12:28.659", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 343 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.678Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:28.678", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 344 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 37, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.706Z GET /users [200] 37ms", + "timestamp": "2022-09-26 14:12:28.706", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 345 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.733Z GET /posts/45326 [200] 36ms", + "timestamp": "2022-09-26 14:12:28.733", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 346 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.757Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:28.757", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 347 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.788Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:28.788", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 348 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.811Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:28.811", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 349 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.829Z GET /users [200] 53ms", + "timestamp": "2022-09-26 14:12:28.829", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 350 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.858Z GET /posts/45326 [200] 48ms", + "timestamp": "2022-09-26 14:12:28.858", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 351 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 97, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.881Z PUT /posts [200] 97ms", + "timestamp": "2022-09-26 14:12:28.881", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 352 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 94, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:28.903Z PUT /posts [200] 94ms", + "timestamp": "2022-09-26 14:12:28.903", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 353 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.935Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:28.935", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 354 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.967Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:28.967", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 355 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:28.994Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:28.994", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 356 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.016Z GET /posts/45326 [200] 56ms", + "timestamp": "2022-09-26 14:12:29.016", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 357 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.036Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:29.036", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 358 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.062Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:29.062", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 359 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.085Z GET /login [200] 60ms", + "timestamp": "2022-09-26 14:12:29.085", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + 
}, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 360 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 99, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.112Z PUT /posts [200] 99ms", + "timestamp": "2022-09-26 14:12:29.112", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 361 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.140Z GET /posts/45326 [200] 52ms", + "timestamp": "2022-09-26 14:12:29.140", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 362 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.169Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:29.169", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 363 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.197Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:29.197", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 364 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.219Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:29.219", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 365 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.239Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:29.239", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 366 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.261Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:29.261", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 367 + } + }, + { + "data": { + "controller": "PostsController", + 
"http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.291Z GET /posts/45326 [200] 58ms", + "timestamp": "2022-09-26 14:12:29.291", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 368 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.312Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:29.312", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 369 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.331Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:29.331", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 370 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.355Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:29.355", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 371 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.376Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:29.376", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 372 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.403Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:29.403", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 373 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 121, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.433Z PUT /posts [200] 121ms", + "timestamp": "2022-09-26 14:12:29.433", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 374 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.454Z GET /login [200] 
61ms", + "timestamp": "2022-09-26 14:12:29.454", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 375 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.486Z GET /posts/45326 [200] 64ms", + "timestamp": "2022-09-26 14:12:29.486", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 376 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.510Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:29.510", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 377 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.541Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:29.541", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 378 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.565Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:29.565", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 379 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.585Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:29.585", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 380 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.604Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:29.604", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 381 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.624Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:29.624", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + 
"indexName": "graylog_0", + "indexType": "message", + "indexId": 382 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.646Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:29.646", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 383 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.676Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:29.676", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 384 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.698Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:29.698", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 385 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.725Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:29.725", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 386 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.750Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:29.750", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 387 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.771Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:29.771", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 388 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.800Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:29.800", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 389 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, 
+ "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.832Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:29.832", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 390 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.851Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:29.851", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 391 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.876Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:29.876", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 392 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.902Z GET /login [200] 37ms", + "timestamp": "2022-09-26 14:12:29.902", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 393 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.928Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:29.928", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 394 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.960Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:29.960", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 395 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:29.992Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:29.992", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 396 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.018Z GET /login [200] 64ms", + "timestamp": "2022-09-26 14:12:30.018", + "http_response_code": 200, 
+ "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 397 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.037Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:30.037", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 398 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.060Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:30.060", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 399 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.086Z GET /login [200] 54ms", + "timestamp": "2022-09-26 14:12:30.086", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 400 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.105Z GET /posts/45326 [200] 64ms", + "timestamp": "2022-09-26 14:12:30.105", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 401 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.131Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:30.131", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 402 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.159Z GET /login [200] 62ms", + "timestamp": "2022-09-26 14:12:30.159", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 403 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.190Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:30.190", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 404 + } + }, + { + 
"data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.218Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:30.218", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 405 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.248Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:30.248", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 406 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.275Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:30.275", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 407 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.297Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:30.297", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 408 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.320Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:30.320", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 409 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.345Z DELETE /posts [204] 55ms", + "timestamp": "2022-09-26 14:12:30.345", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 410 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.377Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:30.377", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 411 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:30.404Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:30.404", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 412 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.422Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:30.422", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 413 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 128, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.451Z PUT /posts [200] 128ms", + "timestamp": "2022-09-26 14:12:30.451", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 414 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.476Z GET /login [200] 40ms", + "timestamp": "2022-09-26 14:12:30.476", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 415 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.506Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:30.506", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 416 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.537Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:30.537", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 417 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 79, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.564Z PUT /posts [200] 79ms", + "timestamp": "2022-09-26 14:12:30.564", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 418 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.583Z GET /posts/45326 [200] 53ms", + "timestamp": "2022-09-26 14:12:30.583", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + 
}, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 419 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.615Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:30.615", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 420 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.637Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:30.637", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 421 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.668Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:30.668", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 422 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.695Z GET /login [200] 45ms", + "timestamp": "2022-09-26 14:12:30.695", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 423 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 100, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.720Z PUT /posts [200] 100ms", + "timestamp": "2022-09-26 14:12:30.720", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 424 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.748Z GET /login [200] 60ms", + "timestamp": "2022-09-26 14:12:30.748", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 425 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.766Z GET /posts/45326 [200] 45ms", + "timestamp": "2022-09-26 14:12:30.766", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 426 + } + }, + { + "data": { + "controller": "UsersController", + 
"http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.791Z GET /users [200] 63ms", + "timestamp": "2022-09-26 14:12:30.791", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 427 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.814Z GET /posts/45326 [200] 50ms", + "timestamp": "2022-09-26 14:12:30.814", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 428 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.838Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:30.838", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 429 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.862Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:30.862", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 430 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.892Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:30.892", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 431 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.916Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:30.916", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 432 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.947Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:30.947", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 433 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.974Z GET /posts [200] 41ms", + 
"timestamp": "2022-09-26 14:12:30.974", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 434 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:30.994Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:30.994", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 435 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.013Z GET /login [200] 48ms", + "timestamp": "2022-09-26 14:12:31.013", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 436 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.042Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:31.042", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 437 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "DELETE", + "took_ms": 72, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.068Z DELETE /users [204] 72ms", + "timestamp": "2022-09-26 14:12:31.068", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 438 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.096Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:31.096", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 439 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.121Z GET /posts/45326/edit [200] 59ms", + "timestamp": "2022-09-26 14:12:31.121", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 440 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.140Z GET /login [200] 41ms", + "timestamp": "2022-09-26 14:12:31.140", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + 
"indexName": "graylog_0", + "indexType": "message", + "indexId": 441 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.159Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:31.159", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 442 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 54351, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.186Z GET /posts/45326 [200] 49ms", + "timestamp": "2022-09-26 14:12:31.186", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 443 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.206Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:31.206", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 444 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.235Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:31.235", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 445 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.259Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:31.259", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 446 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.281Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:31.281", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 447 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.303Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:31.303", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 448 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 
6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.331Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:31.331", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 449 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.358Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:31.358", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 450 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.390Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:31.390", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 451 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.417Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:31.417", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 452 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 116, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.446Z PUT /posts [200] 116ms", + "timestamp": "2022-09-26 14:12:31.446", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 453 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.465Z GET /login [200] 62ms", + "timestamp": "2022-09-26 14:12:31.465", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 454 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.496Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:31.496", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 455 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.528Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:31.528", + 
"http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 456 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.549Z GET /login [200] 37ms", + "timestamp": "2022-09-26 14:12:31.549", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 457 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.575Z GET /posts/45326 [200] 64ms", + "timestamp": "2022-09-26 14:12:31.575", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 458 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.595Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:31.595", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 459 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.621Z GET /login [200] 63ms", + "timestamp": "2022-09-26 14:12:31.621", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 460 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.650Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:31.650", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 461 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.676Z GET /users [200] 57ms", + "timestamp": "2022-09-26 14:12:31.676", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 462 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.701Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:31.701", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", 
+ "indexId": 463 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 60, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.732Z GET /users [200] 60ms", + "timestamp": "2022-09-26 14:12:31.732", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 464 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.758Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:31.758", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 465 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.784Z GET /login [200] 58ms", + "timestamp": "2022-09-26 14:12:31.784", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 466 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.808Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:31.808", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 467 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 38, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.833Z GET /login [200] 38ms", + "timestamp": "2022-09-26 14:12:31.833", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 468 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.858Z GET /users [200] 54ms", + "timestamp": "2022-09-26 14:12:31.858", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 469 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.884Z GET /users [200] 45ms", + "timestamp": "2022-09-26 14:12:31.884", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 470 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + 
"message": "2022-09-26T14:12:31.903Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:31.903", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 471 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.925Z GET /posts/45326 [200] 48ms", + "timestamp": "2022-09-26 14:12:31.925", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 472 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.956Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:31.956", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 473 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:31.986Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:31.986", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 474 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 162, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.006Z POST /posts [201] 162ms", + "timestamp": "2022-09-26 14:12:32.006", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 475 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.037Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:32.037", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 476 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.063Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:32.063", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 477 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.095Z GET /posts/45326 [200] 45ms", + "timestamp": "2022-09-26 14:12:32.095", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" 
+ ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 478 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.119Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:32.119", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 479 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.143Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:32.143", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 480 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.164Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:32.164", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 481 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.187Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:32.187", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 482 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.207Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:32.207", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 483 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.237Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:32.237", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 484 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 40, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.269Z GET /login [200] 40ms", + "timestamp": "2022-09-26 14:12:32.269", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 485 + } + }, + { + "data": { + "controller": 
"PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.289Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:32.289", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 486 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "DELETE", + "took_ms": 52, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.308Z DELETE /users [204] 52ms", + "timestamp": "2022-09-26 14:12:32.308", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 487 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.339Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:32.339", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 488 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.366Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:32.366", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 489 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.395Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:32.395", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 490 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 126, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.419Z PUT /posts [200] 126ms", + "timestamp": "2022-09-26 14:12:32.419", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 491 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.446Z GET /login [200] 50ms", + "timestamp": "2022-09-26 14:12:32.446", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 492 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.476Z GET /posts 
[200] 61ms", + "timestamp": "2022-09-26 14:12:32.476", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 493 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 73, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.505Z DELETE /posts/45326 [500] 73ms", + "timestamp": "2022-09-26 14:12:32.505", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 494 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 125, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.536Z PUT /posts/45326 [200] 125ms", + "timestamp": "2022-09-26 14:12:32.536", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 495 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.567Z GET /posts/45326/edit [200] 55ms", + "timestamp": "2022-09-26 14:12:32.567", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 496 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.592Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:32.592", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 497 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.612Z GET /users [200] 60ms", + "timestamp": "2022-09-26 14:12:32.612", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 498 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 50, + "user_id": 54351, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.644Z GET /login [200] 50ms", + "timestamp": "2022-09-26 14:12:32.644", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 499 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.671Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:32.671", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": 
[ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 500 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 57, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.699Z DELETE /posts [204] 57ms", + "timestamp": "2022-09-26 14:12:32.699", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 501 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 162, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.718Z POST /posts [201] 162ms", + "timestamp": "2022-09-26 14:12:32.718", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 502 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.743Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:32.743", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 503 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 9001, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.774Z GET /posts/45326/edit [200] 63ms", + "timestamp": "2022-09-26 14:12:32.774", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 504 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 138, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.803Z POST /posts/45326/edit [201] 138ms", + "timestamp": "2022-09-26 14:12:32.803", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 505 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.828Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:32.828", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 506 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.855Z GET /login [200] 52ms", + "timestamp": "2022-09-26 14:12:32.855", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 507 + } + }, + { + "data": { + "controller": "PostsController", + 
"http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.880Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:32.880", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 508 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.912Z GET /posts/45326 [200] 63ms", + "timestamp": "2022-09-26 14:12:32.912", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 509 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.939Z GET /login [200] 48ms", + "timestamp": "2022-09-26 14:12:32.939", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 510 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.970Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:32.970", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 511 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:32.996Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:32.996", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 512 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 55, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.017Z GET /login [200] 55ms", + "timestamp": "2022-09-26 14:12:33.017", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 513 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.042Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:33.042", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 514 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.070Z GET /login [200] 44ms", + 
"timestamp": "2022-09-26 14:12:33.070", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 515 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.098Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:33.098", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 516 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 77, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.130Z DELETE /posts [204] 77ms", + "timestamp": "2022-09-26 14:12:33.130", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 517 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.161Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:33.161", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 518 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.188Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:33.188", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 519 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "PUT", + "took_ms": 119, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.208Z PUT /login [200] 119ms", + "timestamp": "2022-09-26 14:12:33.208", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 520 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.240Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:33.240", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 521 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.265Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:33.265", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + 
"indexName": "graylog_0", + "indexType": "message", + "indexId": 522 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.289Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:33.289", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 523 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.314Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:33.314", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 524 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 171, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.345Z POST /posts/45326 [201] 171ms", + "timestamp": "2022-09-26 14:12:33.345", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 525 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.367Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:33.367", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 526 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 92, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.395Z PUT /posts [200] 92ms", + "timestamp": "2022-09-26 14:12:33.395", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 527 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.423Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:33.423", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 528 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.447Z GET /posts/45326/edit [200] 65ms", + "timestamp": "2022-09-26 14:12:33.447", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 529 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 
61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.466Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:33.466", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 530 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.485Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:33.485", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 531 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.507Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:33.507", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 532 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 65, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.531Z DELETE /posts/45326 [204] 65ms", + "timestamp": "2022-09-26 14:12:33.531", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 533 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.556Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:33.556", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 534 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.587Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:33.587", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 535 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.611Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:33.611", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 536 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 77, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.629Z PUT /posts [200] 77ms", + "timestamp": "2022-09-26 14:12:33.629", 
+ "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 537 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.654Z GET /login [200] 37ms", + "timestamp": "2022-09-26 14:12:33.654", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 538 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.674Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:33.674", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 539 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 74, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.706Z PUT /posts/45326 [200] 74ms", + "timestamp": "2022-09-26 14:12:33.706", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 540 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 85, + "user_id": 9001, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.730Z DELETE /posts/45326/edit [204] 85ms", + "timestamp": "2022-09-26 14:12:33.730", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 541 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 52, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.753Z GET /login [200] 52ms", + "timestamp": "2022-09-26 14:12:33.753", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 542 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.777Z GET /login [200] 47ms", + "timestamp": "2022-09-26 14:12:33.777", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 543 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.801Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:33.801", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + 
"indexType": "message", + "indexId": 544 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.832Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:33.832", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 545 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.863Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:33.863", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 546 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.888Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:33.888", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 547 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.914Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:33.914", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 548 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.942Z GET /posts [200] 65ms", + "timestamp": "2022-09-26 14:12:33.942", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 549 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 168, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.967Z POST /posts/45326 [201] 168ms", + "timestamp": "2022-09-26 14:12:33.967", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 550 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:33.997Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:33.997", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 551 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + 
"source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.020Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:34.020", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 552 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.052Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:34.052", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 553 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.072Z GET /login [200] 41ms", + "timestamp": "2022-09-26 14:12:34.072", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 554 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.095Z GET /login [200] 58ms", + "timestamp": "2022-09-26 14:12:34.095", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 555 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.124Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:34.124", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 556 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.149Z GET /login [200] 42ms", + "timestamp": "2022-09-26 14:12:34.149", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 557 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 46, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.181Z GET /users [200] 46ms", + "timestamp": "2022-09-26 14:12:34.181", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 558 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.209Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:34.209", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 559 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.228Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:34.228", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 560 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 53, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.256Z GET /users [200] 53ms", + "timestamp": "2022-09-26 14:12:34.256", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 561 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.283Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:34.283", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 562 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.313Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:34.313", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 563 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.338Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:34.338", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 564 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.362Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:34.362", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 565 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.389Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:34.389", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 566 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.409Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:34.409", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 567 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.439Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:34.439", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 568 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.460Z GET /posts/45326 [200] 43ms", + "timestamp": "2022-09-26 14:12:34.460", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 569 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.491Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:34.491", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 570 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.509Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:34.509", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 571 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.528Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:34.528", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 572 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.557Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:34.557", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 573 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.582Z 
GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:34.582", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 574 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 41, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.605Z GET /users [200] 41ms", + "timestamp": "2022-09-26 14:12:34.605", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 575 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.625Z GET /login [200] 64ms", + "timestamp": "2022-09-26 14:12:34.625", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 576 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.655Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:34.655", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 577 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.679Z GET /users [200] 57ms", + "timestamp": "2022-09-26 14:12:34.679", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 578 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.701Z GET /login [200] 56ms", + "timestamp": "2022-09-26 14:12:34.701", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 579 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.722Z GET /posts/45326 [200] 37ms", + "timestamp": "2022-09-26 14:12:34.722", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 580 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.741Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:34.741", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + 
"index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 581 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.771Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:34.771", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 582 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.796Z GET /login [200] 48ms", + "timestamp": "2022-09-26 14:12:34.796", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 583 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.826Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:34.826", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 584 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.855Z GET /login [200] 46ms", + "timestamp": "2022-09-26 14:12:34.855", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 585 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.874Z GET /login [200] 43ms", + "timestamp": "2022-09-26 14:12:34.874", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 586 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.907Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:34.907", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 587 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.928Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:34.928", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 588 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 49, + 
"user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.947Z GET /login [200] 49ms", + "timestamp": "2022-09-26 14:12:34.947", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 589 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.965Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:34.965", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 590 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:34.990Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:34.990", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 591 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 150, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.020Z POST /posts/45326 [201] 150ms", + "timestamp": "2022-09-26 14:12:35.020", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 592 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.040Z GET /login [200] 43ms", + "timestamp": "2022-09-26 14:12:35.040", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 593 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.064Z GET /login [200] 56ms", + "timestamp": "2022-09-26 14:12:35.064", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 594 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.095Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:35.095", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 595 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.127Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:35.127", 
+ "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 596 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.153Z GET /login [200] 54ms", + "timestamp": "2022-09-26 14:12:35.153", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 597 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.185Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:35.185", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 598 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 372, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.217Z GET /posts [500] 372ms", + "timestamp": "2022-09-26 14:12:35.217", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 599 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.245Z GET /login [200] 60ms", + "timestamp": "2022-09-26 14:12:35.245", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 600 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.263Z GET /users [200] 59ms", + "timestamp": "2022-09-26 14:12:35.263", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 601 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.284Z GET /users [200] 38ms", + "timestamp": "2022-09-26 14:12:35.284", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 602 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 58, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.309Z GET /login [200] 58ms", + "timestamp": "2022-09-26 14:12:35.309", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + 
"indexId": 603 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "PUT", + "took_ms": 100, + "user_id": 54351, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.328Z PUT /login [200] 100ms", + "timestamp": "2022-09-26 14:12:35.328", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 604 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "PUT", + "took_ms": 119, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.354Z PUT /login [200] 119ms", + "timestamp": "2022-09-26 14:12:35.354", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 605 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.386Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:35.386", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 606 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.414Z GET /login [200] 58ms", + "timestamp": "2022-09-26 14:12:35.414", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 607 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.446Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:35.446", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 608 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.465Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:35.465", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 609 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.498Z GET /posts/45326 [200] 48ms", + "timestamp": "2022-09-26 14:12:35.498", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 610 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 74422, + "action": "index", + "source": 
"pivot-fixtures", + "message": "2022-09-26T14:12:35.527Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:35.527", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 611 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.550Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:35.550", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 612 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.579Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:35.579", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 613 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.608Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:35.608", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 614 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 102, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.640Z DELETE /posts [204] 102ms", + "timestamp": "2022-09-26 14:12:35.640", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 615 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.658Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:35.658", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 616 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.690Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:35.690", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 617 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.714Z GET /posts [500] 59ms", + "timestamp": "2022-09-26 14:12:35.714", + "http_response_code": 500, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 618 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.741Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:35.741", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 619 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 100, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.761Z PUT /posts [200] 100ms", + "timestamp": "2022-09-26 14:12:35.761", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 620 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.784Z GET /posts/45326/edit [200] 46ms", + "timestamp": "2022-09-26 14:12:35.784", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 621 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.804Z GET /posts/45326/edit [200] 39ms", + "timestamp": "2022-09-26 14:12:35.804", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 622 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 149, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.832Z POST /posts [201] 149ms", + "timestamp": "2022-09-26 14:12:35.832", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 623 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.860Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:35.860", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 624 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.882Z GET /login [200] 63ms", + "timestamp": "2022-09-26 14:12:35.882", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 625 + } + 
}, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.902Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:35.902", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 626 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "PUT", + "took_ms": 88, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.925Z PUT /login [200] 88ms", + "timestamp": "2022-09-26 14:12:35.925", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 627 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.957Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:35.957", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 628 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 57, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:35.981Z DELETE /posts [204] 57ms", + "timestamp": "2022-09-26 14:12:35.981", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 629 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.005Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:36.005", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 630 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.032Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:36.032", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 631 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.060Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:36.060", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 632 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 73, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + 
"message": "2022-09-26T14:12:36.079Z DELETE /posts/45326 [204] 73ms", + "timestamp": "2022-09-26 14:12:36.079", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 633 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.099Z GET /posts/45326 [200] 63ms", + "timestamp": "2022-09-26 14:12:36.099", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 634 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.121Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:36.121", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 635 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.149Z GET /posts/45326 [200] 53ms", + "timestamp": "2022-09-26 14:12:36.149", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 636 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.180Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:36.180", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 637 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.208Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:36.208", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 638 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.234Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:36.234", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 639 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 138, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.256Z POST /posts [201] 138ms", + "timestamp": "2022-09-26 14:12:36.256", + "http_response_code": 201, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 640 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.287Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:36.287", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 641 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 5150, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.315Z POST /posts [504] 5150ms", + "timestamp": "2022-09-26 14:12:36.315", + "http_response_code": 504, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 642 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.343Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:36.343", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 643 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.364Z GET /posts [500] 61ms", + "timestamp": "2022-09-26 14:12:36.364", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 644 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.387Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:36.387", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 645 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.416Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:36.416", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 646 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.444Z GET /posts/45326 [200] 48ms", + "timestamp": "2022-09-26 14:12:36.444", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 647 + } + }, + { + "data": 
{ + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.477Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:36.477", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 648 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.508Z GET /posts/45326/edit [200] 56ms", + "timestamp": "2022-09-26 14:12:36.508", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 649 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 50, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.533Z GET /users [200] 50ms", + "timestamp": "2022-09-26 14:12:36.533", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 650 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.562Z GET /posts/45326 [200] 63ms", + "timestamp": "2022-09-26 14:12:36.562", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 651 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.594Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:36.594", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 652 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.624Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:36.624", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 653 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 97, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.650Z PUT /posts [200] 97ms", + "timestamp": "2022-09-26 14:12:36.650", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 654 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:36.681Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:36.681", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 655 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "POST", + "took_ms": 147, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.709Z POST /users [201] 147ms", + "timestamp": "2022-09-26 14:12:36.709", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 656 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.730Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:36.730", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 657 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.761Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:36.761", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 658 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.786Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:36.786", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 659 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.810Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:36.810", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 660 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.840Z GET /login [200] 54ms", + "timestamp": "2022-09-26 14:12:36.840", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 661 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.865Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:36.865", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { 
+ "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 662 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.892Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:36.892", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 663 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 51, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.911Z GET /users [200] 51ms", + "timestamp": "2022-09-26 14:12:36.911", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 664 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.941Z GET /posts [500] 42ms", + "timestamp": "2022-09-26 14:12:36.941", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 665 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.960Z GET /posts/45326 [200] 42ms", + "timestamp": "2022-09-26 14:12:36.960", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 666 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:36.989Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:36.989", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 667 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.009Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:37.009", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 668 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.030Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:37.030", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 669 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": 
"GET", + "took_ms": 55, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.061Z GET /users [200] 55ms", + "timestamp": "2022-09-26 14:12:37.061", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 670 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 176, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.089Z POST /posts [201] 176ms", + "timestamp": "2022-09-26 14:12:37.089", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 671 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.116Z GET /users [200] 62ms", + "timestamp": "2022-09-26 14:12:37.116", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 672 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.144Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:37.144", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 673 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.166Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:37.166", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 674 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.193Z GET /posts/45326 [200] 53ms", + "timestamp": "2022-09-26 14:12:37.193", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 675 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.217Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:37.217", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 676 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.248Z GET /posts [200] 64ms", + "timestamp": 
"2022-09-26 14:12:37.248", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 677 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 95, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.268Z DELETE /posts/45326 [204] 95ms", + "timestamp": "2022-09-26 14:12:37.268", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 678 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.296Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:37.296", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 679 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.318Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:37.318", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 680 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.341Z GET /posts/45326/edit [200] 54ms", + "timestamp": "2022-09-26 14:12:37.341", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 681 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.361Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:37.361", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 682 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.383Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:37.383", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 683 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.407Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:37.407", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + 
"indexName": "graylog_0", + "indexType": "message", + "indexId": 684 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.431Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:37.431", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 685 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.454Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:37.454", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 686 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.485Z GET /posts/45326/edit [200] 40ms", + "timestamp": "2022-09-26 14:12:37.485", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 687 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.513Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:37.513", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 688 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.538Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:37.538", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 689 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.565Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:37.565", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 690 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.589Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:37.589", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 691 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 49, + 
"user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.614Z GET /login [200] 49ms", + "timestamp": "2022-09-26 14:12:37.614", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 692 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.633Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:37.633", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 693 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.653Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:37.653", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 694 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.686Z GET /login [200] 38ms", + "timestamp": "2022-09-26 14:12:37.686", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 695 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.709Z GET /posts/45326 [200] 61ms", + "timestamp": "2022-09-26 14:12:37.709", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 696 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.738Z GET /users [200] 38ms", + "timestamp": "2022-09-26 14:12:37.738", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 697 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.765Z GET /posts/45326 [200] 42ms", + "timestamp": "2022-09-26 14:12:37.765", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 698 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.792Z GET /posts/45326 [200] 52ms", + "timestamp": "2022-09-26 14:12:37.792", + 
"http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 699 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.821Z GET /users [200] 42ms", + "timestamp": "2022-09-26 14:12:37.821", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 700 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.841Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:37.841", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 701 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.869Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:37.869", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 702 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.892Z GET /login [200] 53ms", + "timestamp": "2022-09-26 14:12:37.892", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 703 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.912Z GET /login [200] 43ms", + "timestamp": "2022-09-26 14:12:37.912", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 704 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.938Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:37.938", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 705 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.958Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:37.958", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + 
"indexId": 706 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:37.985Z GET /login [200] 63ms", + "timestamp": "2022-09-26 14:12:37.985", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 707 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.015Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:38.015", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 708 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.037Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:38.037", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 709 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 72, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.060Z PUT /posts [200] 72ms", + "timestamp": "2022-09-26 14:12:38.060", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 710 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.080Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:38.080", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 711 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 125, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.110Z POST /login [201] 125ms", + "timestamp": "2022-09-26 14:12:38.110", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 712 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 110, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.128Z PUT /posts [200] 110ms", + "timestamp": "2022-09-26 14:12:38.128", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 713 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "show", + "source": 
"pivot-fixtures", + "message": "2022-09-26T14:12:38.154Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:38.154", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 714 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.175Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:38.175", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 715 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.199Z GET /posts [200] 65ms", + "timestamp": "2022-09-26 14:12:38.199", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 716 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.230Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:38.230", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 717 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.255Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:38.255", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 718 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.280Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:38.280", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 719 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.299Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:38.299", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 720 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 173, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.320Z POST /posts [201] 173ms", + "timestamp": "2022-09-26 14:12:38.320", + "http_response_code": 201, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 721 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.338Z GET /login [200] 50ms", + "timestamp": "2022-09-26 14:12:38.338", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 722 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.361Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:38.361", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 723 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.393Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:38.393", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 724 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.419Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:38.419", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 725 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.441Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:38.441", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 726 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.465Z GET /users [200] 42ms", + "timestamp": "2022-09-26 14:12:38.465", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 727 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.484Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:38.484", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 728 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.510Z GET /posts [200] 61ms", + "timestamp": "2022-09-26 14:12:38.510", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 729 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.540Z GET /users [200] 37ms", + "timestamp": "2022-09-26 14:12:38.540", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 730 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.570Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:38.570", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 731 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 79, + "user_id": 54351, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.599Z PUT /posts/45326 [200] 79ms", + "timestamp": "2022-09-26 14:12:38.599", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 732 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.622Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:38.622", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 733 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.642Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:38.642", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 734 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.662Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:38.662", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 735 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:38.691Z GET /login [200] 60ms", + "timestamp": "2022-09-26 14:12:38.691", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 736 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.714Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:38.714", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 737 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.745Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:38.745", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 738 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.769Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:38.769", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 739 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.800Z GET /posts/45326 [200] 42ms", + "timestamp": "2022-09-26 14:12:38.800", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 740 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.828Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:38.828", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 741 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.847Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:38.847", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 742 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.869Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:38.869", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + 
}, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 743 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.889Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:38.889", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 744 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.908Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:38.908", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 745 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.936Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:38.936", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 746 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.954Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:38.954", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 747 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:38.981Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:38.981", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 748 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.011Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:39.011", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 749 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.031Z GET /login [200] 46ms", + "timestamp": "2022-09-26 14:12:39.031", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 750 + } + }, + { + "data": { + "controller": "PostsController", + 
"http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.050Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:39.050", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 751 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.078Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:39.078", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 752 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 72, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.099Z PUT /posts [200] 72ms", + "timestamp": "2022-09-26 14:12:39.099", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 753 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.118Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:39.118", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 754 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 56, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.136Z GET /login [200] 56ms", + "timestamp": "2022-09-26 14:12:39.136", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 755 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.155Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:39.155", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 756 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.182Z GET /posts/45326 [500] 48ms", + "timestamp": "2022-09-26 14:12:39.182", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 757 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.204Z GET /posts [200] 57ms", + "timestamp": 
"2022-09-26 14:12:39.204", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 758 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.232Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:39.232", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 759 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.258Z GET /posts/45326/edit [200] 36ms", + "timestamp": "2022-09-26 14:12:39.258", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 760 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.280Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:39.280", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 761 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.309Z GET /posts/45326 [200] 65ms", + "timestamp": "2022-09-26 14:12:39.309", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 762 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.333Z GET /posts/45326/edit [200] 43ms", + "timestamp": "2022-09-26 14:12:39.333", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 763 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.354Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:39.354", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 764 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.386Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:39.386", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + 
"indexName": "graylog_0", + "indexType": "message", + "indexId": 765 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.410Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:39.410", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 766 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "DELETE", + "took_ms": 103, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.429Z DELETE /users [204] 103ms", + "timestamp": "2022-09-26 14:12:39.429", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 767 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.448Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:39.448", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 768 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 53, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.472Z DELETE /posts/45326 [204] 53ms", + "timestamp": "2022-09-26 14:12:39.472", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 769 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.500Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:39.500", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 770 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.523Z GET /login [200] 57ms", + "timestamp": "2022-09-26 14:12:39.523", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 771 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.551Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:39.551", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 772 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + 
"took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.576Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:39.576", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 773 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.600Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:39.600", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 774 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.625Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:39.625", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 775 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.643Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:39.643", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 776 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.666Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:39.666", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 777 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.690Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:39.690", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 778 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.710Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:39.710", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 779 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 49, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.740Z GET /login [200] 49ms", + "timestamp": "2022-09-26 14:12:39.740", 
+ "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 780 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.772Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:39.772", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 781 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.794Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:39.794", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 782 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.815Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:39.815", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 783 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.844Z GET /users [200] 48ms", + "timestamp": "2022-09-26 14:12:39.844", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 784 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.872Z GET /login [200] 43ms", + "timestamp": "2022-09-26 14:12:39.872", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 785 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.899Z GET /posts/45326/edit [200] 64ms", + "timestamp": "2022-09-26 14:12:39.899", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 786 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.930Z GET /users [200] 47ms", + "timestamp": "2022-09-26 14:12:39.930", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": 
"message", + "indexId": 787 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.955Z GET /login [200] 48ms", + "timestamp": "2022-09-26 14:12:39.955", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 788 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.975Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:39.975", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 789 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:39.993Z GET /users [200] 50ms", + "timestamp": "2022-09-26 14:12:39.993", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 790 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.025Z GET /login [200] 42ms", + "timestamp": "2022-09-26 14:12:40.025", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 791 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.057Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:40.057", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 792 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.084Z GET /posts/45326 [200] 53ms", + "timestamp": "2022-09-26 14:12:40.084", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 793 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.107Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:40.107", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 794 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 49, + "user_id": 9001, + "action": "login", + "source": 
"pivot-fixtures", + "message": "2022-09-26T14:12:40.127Z GET /login [200] 49ms", + "timestamp": "2022-09-26 14:12:40.127", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 795 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.150Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:40.150", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 796 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.170Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:40.170", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 797 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.193Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:40.193", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 798 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.217Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:40.217", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 799 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.243Z GET /posts/45326 [200] 50ms", + "timestamp": "2022-09-26 14:12:40.243", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 800 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.269Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:40.269", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 801 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.291Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:40.291", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 802 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.320Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:40.320", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 803 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.344Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:40.344", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 804 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 132, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.367Z POST /posts [201] 132ms", + "timestamp": "2022-09-26 14:12:40.367", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 805 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.392Z GET /login [200] 51ms", + "timestamp": "2022-09-26 14:12:40.392", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 806 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.423Z GET /login [200] 58ms", + "timestamp": "2022-09-26 14:12:40.423", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 807 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.453Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:40.453", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 808 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.471Z GET /users [200] 64ms", + "timestamp": "2022-09-26 14:12:40.471", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 809 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.502Z GET /posts/45326 [200] 49ms", + "timestamp": "2022-09-26 14:12:40.502", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 810 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.531Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:40.531", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 811 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 161, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.553Z POST /posts [500] 161ms", + "timestamp": "2022-09-26 14:12:40.553", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 812 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.571Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:40.571", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 813 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 89, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.602Z PUT /posts [200] 89ms", + "timestamp": "2022-09-26 14:12:40.602", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 814 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.627Z GET /posts/45326 [200] 38ms", + "timestamp": "2022-09-26 14:12:40.627", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 815 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.647Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:40.647", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 816 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:40.671Z GET /users [200] 44ms", + "timestamp": "2022-09-26 14:12:40.671", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 817 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.693Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:40.693", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 818 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.714Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:40.714", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 819 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.746Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:40.746", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 820 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.767Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:40.767", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 821 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 61, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.791Z GET /users [200] 61ms", + "timestamp": "2022-09-26 14:12:40.791", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 822 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.814Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:40.814", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 823 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.839Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:40.839", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, 
+ { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 824 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.859Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:40.859", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 825 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.878Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:40.878", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 826 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.909Z GET /users [200] 51ms", + "timestamp": "2022-09-26 14:12:40.909", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 827 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 138, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.934Z POST /posts [201] 138ms", + "timestamp": "2022-09-26 14:12:40.934", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 828 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.960Z GET /posts/45326 [200] 45ms", + "timestamp": "2022-09-26 14:12:40.960", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 829 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 168, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:40.981Z POST /login [201] 168ms", + "timestamp": "2022-09-26 14:12:40.981", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 830 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 155, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.009Z POST /posts [201] 155ms", + "timestamp": "2022-09-26 14:12:41.009", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 831 + } + }, + { + "data": { + "controller": "PostsController", + 
"http_method": "GET", + "took_ms": 52, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.031Z GET /posts/45326 [200] 52ms", + "timestamp": "2022-09-26 14:12:41.031", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 832 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.062Z GET /posts/45326 [200] 41ms", + "timestamp": "2022-09-26 14:12:41.062", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 833 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 111, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.083Z PUT /posts [200] 111ms", + "timestamp": "2022-09-26 14:12:41.083", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 834 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 48, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.107Z DELETE /posts [204] 48ms", + "timestamp": "2022-09-26 14:12:41.107", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 835 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 36, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.139Z GET /posts [200] 36ms", + "timestamp": "2022-09-26 14:12:41.139", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 836 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.162Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:41.162", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 837 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.194Z GET /login [200] 62ms", + "timestamp": "2022-09-26 14:12:41.194", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 838 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "PUT", + "took_ms": 74, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.219Z PUT /login [200] 74ms", + 
"timestamp": "2022-09-26 14:12:41.219", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 839 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.247Z GET /login [200] 47ms", + "timestamp": "2022-09-26 14:12:41.247", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 840 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.267Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:41.267", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 841 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.289Z GET /posts/45326 [200] 47ms", + "timestamp": "2022-09-26 14:12:41.289", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 842 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.314Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:41.314", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 843 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.342Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:41.342", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 844 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 107, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.373Z PUT /posts/45326 [200] 107ms", + "timestamp": "2022-09-26 14:12:41.373", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 845 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.403Z GET /posts/45326 [200] 43ms", + "timestamp": "2022-09-26 14:12:41.403", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + 
"indexName": "graylog_0", + "indexType": "message", + "indexId": 846 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.425Z GET /posts/45326 [200] 41ms", + "timestamp": "2022-09-26 14:12:41.425", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 847 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.448Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:41.448", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 848 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 90, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.476Z PUT /posts [200] 90ms", + "timestamp": "2022-09-26 14:12:41.476", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 849 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.509Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:41.509", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 850 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 5300, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.528Z GET /posts [504] 5300ms", + "timestamp": "2022-09-26 14:12:41.528", + "http_response_code": 504, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 851 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.549Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:41.549", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 852 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 62, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.569Z GET /login [200] 62ms", + "timestamp": "2022-09-26 14:12:41.569", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 853 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "DELETE", + "took_ms": 67, + 
"user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.597Z DELETE /login [204] 67ms", + "timestamp": "2022-09-26 14:12:41.597", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 854 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.619Z GET /login [200] 49ms", + "timestamp": "2022-09-26 14:12:41.619", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 855 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.642Z GET /login [200] 47ms", + "timestamp": "2022-09-26 14:12:41.642", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 856 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.670Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:41.670", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 857 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.700Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:41.700", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 858 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 131, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.721Z POST /login [201] 131ms", + "timestamp": "2022-09-26 14:12:41.721", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 859 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.743Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:41.743", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 860 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 5000, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.763Z GET /posts/45326 [504] 5000ms", + "timestamp": "2022-09-26 
14:12:41.763", + "http_response_code": 504, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 861 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.786Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:41.786", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 862 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.812Z GET /login [200] 39ms", + "timestamp": "2022-09-26 14:12:41.812", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 863 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.838Z GET /posts/45326 [200] 54ms", + "timestamp": "2022-09-26 14:12:41.838", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 864 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.856Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:41.856", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 865 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 174, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.888Z POST /login [201] 174ms", + "timestamp": "2022-09-26 14:12:41.888", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 866 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.920Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:41.920", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 867 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.940Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:41.940", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + 
"indexType": "message", + "indexId": 868 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.969Z GET /posts/45326 [500] 60ms", + "timestamp": "2022-09-26 14:12:41.969", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 869 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:41.994Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:41.994", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 870 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.022Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:42.022", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 871 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 102, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.047Z PUT /posts [200] 102ms", + "timestamp": "2022-09-26 14:12:42.047", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 872 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.075Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:42.075", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 873 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 74422, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.102Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:42.102", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 874 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.129Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:42.129", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 875 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": 
"index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.156Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:42.156", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 876 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 82, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.189Z PUT /posts [200] 82ms", + "timestamp": "2022-09-26 14:12:42.189", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 877 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.214Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:42.214", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 878 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 61, + "user_id": 54351, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.245Z GET /login [200] 61ms", + "timestamp": "2022-09-26 14:12:42.245", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 879 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 50, + "user_id": 74422, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.263Z GET /login [200] 50ms", + "timestamp": "2022-09-26 14:12:42.263", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 880 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.281Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:42.281", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 881 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.301Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:42.301", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 882 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 52, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.325Z GET /posts [200] 52ms", + "timestamp": "2022-09-26 14:12:42.325", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 883 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.349Z GET /posts [200] 65ms", + "timestamp": "2022-09-26 14:12:42.349", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 884 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.376Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:42.376", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 885 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.406Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:42.406", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 886 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 117, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.438Z PUT /posts/45326 [200] 117ms", + "timestamp": "2022-09-26 14:12:42.438", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 887 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.456Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:42.456", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 888 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.478Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:42.478", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 889 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.496Z GET /posts/45326 [200] 65ms", + "timestamp": "2022-09-26 14:12:42.496", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 890 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.515Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:42.515", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 891 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.536Z GET /posts/45326/edit [500] 65ms", + "timestamp": "2022-09-26 14:12:42.536", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 892 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.557Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:42.557", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 893 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 50, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.586Z DELETE /posts/45326 [204] 50ms", + "timestamp": "2022-09-26 14:12:42.586", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 894 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 82, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.617Z DELETE /posts [204] 82ms", + "timestamp": "2022-09-26 14:12:42.617", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 895 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 77, + "user_id": 6469981, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.640Z DELETE /posts/45326/edit [204] 77ms", + "timestamp": "2022-09-26 14:12:42.640", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 896 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.669Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:42.669", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 897 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", 
+ "message": "2022-09-26T14:12:42.695Z GET /posts/45326 [200] 62ms", + "timestamp": "2022-09-26 14:12:42.695", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 898 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.726Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:42.726", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 899 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.758Z GET /posts/45326 [200] 57ms", + "timestamp": "2022-09-26 14:12:42.758", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 900 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.787Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:42.787", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 901 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.809Z GET /login [200] 54ms", + "timestamp": "2022-09-26 14:12:42.809", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 902 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.828Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:42.828", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 903 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.853Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:42.853", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 904 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.881Z GET /posts/45326 [200] 40ms", + "timestamp": "2022-09-26 14:12:42.881", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 905 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.909Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:42.909", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 906 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.939Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:42.939", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 907 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.967Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:42.967", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 908 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:42.991Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:42.991", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 909 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.009Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:43.009", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 910 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 48, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.039Z GET /posts [200] 48ms", + "timestamp": "2022-09-26 14:12:43.039", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 911 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.067Z GET /posts [200] 46ms", + "timestamp": "2022-09-26 14:12:43.067", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 912 + } + }, + { + "data": { + 
"controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.090Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:43.090", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 913 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 49, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.114Z GET /posts [200] 49ms", + "timestamp": "2022-09-26 14:12:43.114", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 914 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.132Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:43.132", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 915 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.164Z GET /login [200] 44ms", + "timestamp": "2022-09-26 14:12:43.164", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 916 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 4750, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.189Z PUT /posts [504] 4750ms", + "timestamp": "2022-09-26 14:12:43.189", + "http_response_code": 504, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 917 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.207Z GET /posts/45326 [200] 59ms", + "timestamp": "2022-09-26 14:12:43.207", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 918 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.236Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:43.236", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 919 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": 
"2022-09-26T14:12:43.259Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:43.259", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 920 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.292Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:43.292", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 921 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.314Z GET /posts/45326 [200] 58ms", + "timestamp": "2022-09-26 14:12:43.314", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 922 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.335Z GET /posts/45326 [200] 39ms", + "timestamp": "2022-09-26 14:12:43.335", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 923 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.364Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:43.364", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 924 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 140, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.383Z POST /posts [201] 140ms", + "timestamp": "2022-09-26 14:12:43.383", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 925 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.405Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:43.405", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 926 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.437Z GET /users [200] 53ms", + "timestamp": "2022-09-26 14:12:43.437", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + 
}, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 927 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.467Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:43.467", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 928 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 54351, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.498Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:43.498", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 929 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.525Z GET /login [200] 46ms", + "timestamp": "2022-09-26 14:12:43.525", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 930 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.548Z GET /posts [200] 62ms", + "timestamp": "2022-09-26 14:12:43.548", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 931 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 73, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.572Z DELETE /posts/45326 [204] 73ms", + "timestamp": "2022-09-26 14:12:43.572", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 932 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 143, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.595Z POST /posts [201] 143ms", + "timestamp": "2022-09-26 14:12:43.595", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 933 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.625Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:43.625", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 934 + } + }, + { + "data": { + "controller": "PostsController", + 
"http_method": "GET", + "took_ms": 53, + "user_id": 74422, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.649Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:43.649", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 935 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "POST", + "took_ms": 131, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.669Z POST /login [201] 131ms", + "timestamp": "2022-09-26 14:12:43.669", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 936 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.689Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:43.689", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 937 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.714Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:43.714", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 938 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.743Z GET /posts/45326 [200] 53ms", + "timestamp": "2022-09-26 14:12:43.743", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 939 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.768Z GET /posts [200] 57ms", + "timestamp": "2022-09-26 14:12:43.768", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 940 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.796Z GET /posts [200] 58ms", + "timestamp": "2022-09-26 14:12:43.796", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 941 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.821Z GET /login [200] 46ms", + 
"timestamp": "2022-09-26 14:12:43.821", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 942 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.849Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:43.849", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 943 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 64, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.870Z GET /posts [200] 64ms", + "timestamp": "2022-09-26 14:12:43.870", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 944 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 67, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.890Z DELETE /posts [204] 67ms", + "timestamp": "2022-09-26 14:12:43.890", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 945 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 146, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.917Z POST /posts [201] 146ms", + "timestamp": "2022-09-26 14:12:43.917", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 946 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.945Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:43.945", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 947 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.966Z GET /posts/45326/edit [200] 53ms", + "timestamp": "2022-09-26 14:12:43.966", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 948 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "DELETE", + "took_ms": 56, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:43.992Z DELETE /login [204] 56ms", + "timestamp": "2022-09-26 14:12:43.992", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + 
"index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 949 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.015Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:44.015", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 950 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 65, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.047Z GET /posts/45326 [200] 65ms", + "timestamp": "2022-09-26 14:12:44.047", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 951 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.077Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:44.077", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 952 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "GET", + "took_ms": 58, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.101Z GET /users [200] 58ms", + "timestamp": "2022-09-26 14:12:44.101", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 953 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 47, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.119Z GET /posts [200] 47ms", + "timestamp": "2022-09-26 14:12:44.119", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 954 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 50, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.141Z GET /posts [200] 50ms", + "timestamp": "2022-09-26 14:12:44.141", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 955 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 45, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.163Z GET /posts [200] 45ms", + "timestamp": "2022-09-26 14:12:44.163", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 956 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + 
"user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.181Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:44.181", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 957 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 59, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.201Z GET /posts [200] 59ms", + "timestamp": "2022-09-26 14:12:44.201", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 958 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.223Z GET /login [200] 46ms", + "timestamp": "2022-09-26 14:12:44.223", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 959 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.248Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:44.248", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 960 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.275Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:44.275", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 961 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.306Z GET /posts/45326 [200] 44ms", + "timestamp": "2022-09-26 14:12:44.306", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 962 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.337Z GET /posts/45326 [500] 41ms", + "timestamp": "2022-09-26 14:12:44.337", + "http_response_code": 500, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 963 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 38, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.366Z GET /posts [200] 38ms", + "timestamp": "2022-09-26 14:12:44.366", 
+ "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 964 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.388Z GET /posts [200] 55ms", + "timestamp": "2022-09-26 14:12:44.388", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 965 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.412Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:44.412", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 966 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 62, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.430Z DELETE /posts [204] 62ms", + "timestamp": "2022-09-26 14:12:44.430", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 967 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 63, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.461Z GET /posts [200] 63ms", + "timestamp": "2022-09-26 14:12:44.461", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 968 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.486Z GET /posts/45326 [200] 53ms", + "timestamp": "2022-09-26 14:12:44.486", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 969 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 37, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.515Z GET /posts [200] 37ms", + "timestamp": "2022-09-26 14:12:44.515", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 970 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 54, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.546Z GET /posts [200] 54ms", + "timestamp": "2022-09-26 14:12:44.546", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": 
"message", + "indexId": 971 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 43, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.574Z GET /posts [200] 43ms", + "timestamp": "2022-09-26 14:12:44.574", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 972 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 51, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.595Z GET /posts [200] 51ms", + "timestamp": "2022-09-26 14:12:44.595", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 973 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 44, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.617Z GET /posts [200] 44ms", + "timestamp": "2022-09-26 14:12:44.617", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 974 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 89, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.649Z DELETE /posts [204] 89ms", + "timestamp": "2022-09-26 14:12:44.649", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 975 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 112, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.668Z PUT /posts [200] 112ms", + "timestamp": "2022-09-26 14:12:44.668", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 976 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 70, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.697Z DELETE /posts [204] 70ms", + "timestamp": "2022-09-26 14:12:44.697", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 977 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.720Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:44.720", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 978 + } + }, + { + "data": { + "controller": "UsersController", + "http_method": "POST", + "took_ms": 131, + "user_id": 6476752, + "action": "index", + 
"source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.745Z POST /users [201] 131ms", + "timestamp": "2022-09-26 14:12:44.745", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 979 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.765Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:44.765", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 980 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 71, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.787Z DELETE /posts [204] 71ms", + "timestamp": "2022-09-26 14:12:44.787", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 981 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 41, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.818Z GET /posts [200] 41ms", + "timestamp": "2022-09-26 14:12:44.818", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 982 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "POST", + "took_ms": 165, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.839Z POST /posts [201] 165ms", + "timestamp": "2022-09-26 14:12:44.839", + "http_response_code": 201, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 983 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 42, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.867Z GET /posts [200] 42ms", + "timestamp": "2022-09-26 14:12:44.867", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 984 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.887Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:44.887", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 985 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 39, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.916Z GET /posts [200] 39ms", + "timestamp": "2022-09-26 14:12:44.916", + "http_response_code": 200, + "streams": [ + 
"000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 986 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 57, + "user_id": 6476752, + "action": "edit", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.935Z GET /posts/45326/edit [200] 57ms", + "timestamp": "2022-09-26 14:12:44.935", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 987 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.958Z GET /posts/45326 [200] 62ms", + "timestamp": "2022-09-26 14:12:44.958", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 988 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 56, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:44.985Z GET /posts [200] 56ms", + "timestamp": "2022-09-26 14:12:44.985", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 989 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 63, + "user_id": 9001, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.011Z GET /login [200] 63ms", + "timestamp": "2022-09-26 14:12:45.011", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 990 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.035Z GET /posts [200] 53ms", + "timestamp": "2022-09-26 14:12:45.035", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 991 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 9001, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.060Z GET /posts [200] 60ms", + "timestamp": "2022-09-26 14:12:45.060", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 992 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 60, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.088Z GET /posts/45326 [200] 60ms", + "timestamp": "2022-09-26 14:12:45.088", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 993 + } + }, + { + 
"data": { + "controller": "PostsController", + "http_method": "DELETE", + "took_ms": 79, + "user_id": 6476752, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.110Z DELETE /posts [204] 79ms", + "timestamp": "2022-09-26 14:12:45.110", + "http_response_code": 204, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 994 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 55, + "user_id": 54351, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.130Z GET /posts/45326 [200] 55ms", + "timestamp": "2022-09-26 14:12:45.130", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 995 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "PUT", + "took_ms": 84, + "user_id": 6469981, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.151Z PUT /posts/45326 [200] 84ms", + "timestamp": "2022-09-26 14:12:45.151", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 996 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 40, + "user_id": 6469981, + "action": "index", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.170Z GET /posts [200] 40ms", + "timestamp": "2022-09-26 14:12:45.170", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 997 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 62, + "user_id": 6476752, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.196Z GET /posts/45326 [200] 62ms", + "timestamp": "2022-09-26 14:12:45.196", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 998 + } + }, + { + "data": { + "controller": "LoginController", + "http_method": "GET", + "took_ms": 53, + "user_id": 6476752, + "action": "login", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.222Z GET /login [200] 53ms", + "timestamp": "2022-09-26 14:12:45.222", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": 999 + } + }, + { + "data": { + "controller": "PostsController", + "http_method": "GET", + "took_ms": 46, + "user_id": 9001, + "action": "show", + "source": "pivot-fixtures", + "message": "2022-09-26T14:12:45.240Z GET /posts/45326 [200] 46ms", + "timestamp": "2022-09-26 14:12:45.240", + "http_response_code": 200, + "streams": [ + "000000000000000000000001" + ] + } + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/cluster-search-config-reset.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/cluster-search-config-reset.json new file mode 100644 index 000000000000..d553e130a0df --- /dev/null +++ 
b/full-backend-tests/src/test/resources/org/graylog/plugins/views/cluster-search-config-reset.json @@ -0,0 +1,36 @@ +{ + "query_time_range_limit": "PT0S", + "relative_timerange_options": { + "PT5M": "5 minutes", + "PT15M": "15 minutes", + "PT30M": "30 minutes", + "PT1H": "1 hour", + "PT2H": "2 hours", + "PT8H": "8 hours", + "P1D": "1 day", + "P2D": "2 days", + "P5D": "5 days", + "P7D": "7 days", + "P14D": "14 days", + "P30D": "30 days", + "PT0S": "all messages" + }, + "surrounding_timerange_options": { + "PT1S": "1 second", + "PT5S": "5 seconds", + "PT10S": "10 seconds", + "PT30S": "30 seconds", + "PT1M": "1 minute", + "PT5M": "5 minutes" + }, + "surrounding_filter_fields": [ + "file", + "source", + "gl2_source_input", + "source_file" + ], + "analysis_disabled_fields": [ + "full_message", + "message" + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/cluster-search-config.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/cluster-search-config.json new file mode 100644 index 000000000000..8a2bde83c5b4 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/cluster-search-config.json @@ -0,0 +1,36 @@ +{ + "query_time_range_limit": "PT2M", + "relative_timerange_options": { + "PT5M": "5 minutes", + "PT15M": "15 minutes", + "PT30M": "30 minutes", + "PT1H": "1 hour", + "PT2H": "2 hours", + "PT8H": "8 hours", + "P1D": "1 day", + "P2D": "2 days", + "P5D": "5 days", + "P7D": "7 days", + "P14D": "14 days", + "P30D": "30 days", + "PT0S": "all messages" + }, + "surrounding_timerange_options": { + "PT1S": "1 second", + "PT5S": "5 seconds", + "PT10S": "10 seconds", + "PT30S": "30 seconds", + "PT1M": "1 minute", + "PT5M": "5 minutes" + }, + "surrounding_filter_fields": [ + "file", + "source", + "gl2_source_input", + "source_file" + ], + "analysis_disabled_fields": [ + "full_message", + "message" + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/messages-for-export.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/messages-for-export.json new file mode 100644 index 000000000000..d9a892c9dcf2 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/messages-for-export.json @@ -0,0 +1,92 @@ +{ + "documents": [ + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "0" + } + }, + { + "data": { + "gl2_message_id": "0", + "source": "source-1", + "message": "Ha", + "timestamp": "2015-01-01 01:00:00.000", + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "1" + } + }, + { + "data": { + "gl2_message_id": "1", + "source": "source-2", + "message": "He", + "timestamp": "2015-01-01 02:00:00.000", + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "2" + } + }, + { + "data": { + "gl2_message_id": "2", + "source": "source-1", + "message": "Hi", + "timestamp": "2015-01-01 03:00:00.000", + "streams": [ + "000000000000000000000001" + ] + } + } + ] + }, + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "3" + } + }, + { + "data": { + "gl2_message_id": "3", + "source": "source-2", + "message": "Ho", + "timestamp": "2015-01-01 04:00:00.000", + "streams": [ + "000000000000000000000001" + ] + } + } + ] + } + ] +} diff --git 
a/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request-with-streams.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request-with-streams.json new file mode 100644 index 000000000000..9e44df115487 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request-with-streams.json @@ -0,0 +1,22 @@ +{ + "queries": [ + { + "streams": [ + "000000000000000000000001" + ], + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "type": "messages" + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request-with-undeclared-parameter.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request-with-undeclared-parameter.json new file mode 100644 index 000000000000..d95e647d5289 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request-with-undeclared-parameter.json @@ -0,0 +1,20 @@ +{ + "queries": [ + { + "id": "f1446410-a082-4871-b3bf-d69aa42d0c96", + "query": { + "type": "elasticsearch", + "query_string": "action:$action$" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "type": "messages" + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request.json new file mode 100644 index 000000000000..81806b6f67a5 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/minimalistic-request.json @@ -0,0 +1,19 @@ +{ + "queries": [ + { + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "type": "messages" + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/mongodb-stored-searches-for-execution-endpoint.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/mongodb-stored-searches-for-execution-endpoint.json new file mode 100644 index 000000000000..142a32853252 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/mongodb-stored-searches-for-execution-endpoint.json @@ -0,0 +1,82 @@ +{ + "searches": [ + { + "_id": { + "$oid": "61977043c1f17d26b45c8a0b" + }, + "queries": [ + { + "id": "f1446410-a082-4871-b3bf-d69aa42d0c96", + "query": { + "type": "elasticsearch", + "query_string": "action:$action$" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "timerange": null, + "query": null, + "streams": [], + "id": "8306779b-933f-473f-837d-b7a7d83a9a40", + "name": "chart", + "series": [ + { + "type": "count", + "id": "count()" + } + ], + "sort": [], + "rollup": true, + "type": "pivot", + "row_groups": [ + { + "type": "time", + "field": "timestamp", + "interval": { + "type": "auto", + "scaling": 1.0 + } + } + ], + "column_groups": [] + } + ] + }, + { + "id": "f1446410-a082-4871-b3bf-d69aa42d0c97", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "timerange": null, + "query": null, + "streams": [], + "id": "01c76680-377b-4930-86e2-a55fdb867b58", + "name": null, + "limit": 150, + "offset": 0, + "sort": [ + { + "field": "timestamp", + "order": "DESC" + } + ], + 
"fields": [], + "decorators": [], + "type": "messages" + } + ] + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/mongodb-stored-searches-for-metadata-endpoint.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/mongodb-stored-searches-for-metadata-endpoint.json new file mode 100644 index 000000000000..ddccf588ad90 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/mongodb-stored-searches-for-metadata-endpoint.json @@ -0,0 +1,50 @@ +{ + "searches": [ + { + "_id": { + "$oid": "61977043c1f17d26b45c8a0a" + }, + "queries": [ + { + "id": "f1446410-a082-4871-b3bf-d69aa42d0c96", + "query": { + "type": "elasticsearch", + "query_string": "action:$action$" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "type": "messages" + } + ] + } + ] + }, + { + "_id": { + "$oid": "61977428c1f17d26b45c8a0b" + }, + "queries": [ + { + "id": "f1446410-a082-4871-b3bf-d69aa42d0c96", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "type": "messages" + } + ] + } + ] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/save-search-request-invalid.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/save-search-request-invalid.json new file mode 100644 index 000000000000..9bf50c31ab8b --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/save-search-request-invalid.json @@ -0,0 +1,31 @@ +{ + "id": "6141d457d3a6b9d73c8ac55c", + "queries": [ + { + "id": "278992f6-8930-43f5-9dd7-11bbc4e3b797", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "size": 100, + "streams": [], + "fields": [ + "timestamp", + "source", + "message" + ], + "type": "logs", + "id": "967d2217-fd99-48a6-b829-5acdab906807", + "sort": "DESC" + } + ] + } + ], + "parameters": [] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/save-search-request.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/save-search-request.json new file mode 100644 index 000000000000..7f7bf49ad097 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/save-search-request.json @@ -0,0 +1,31 @@ +{ + "id": "6141d457d3a6b9d73c8ac55a", + "queries": [ + { + "id": "278992f6-8930-43f5-9dd7-11bbc4e3b797", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "size": 100, + "streams": [], + "fields": [ + "timestamp", + "source", + "message" + ], + "type": "logs", + "id": "967d2217-fd99-48a6-b829-5acdab906807", + "sort": "DESC" + } + ] + } + ], + "parameters": [] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/search-with-three-empty-queries-v2.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/search-with-three-empty-queries-v2.json new file mode 100644 index 000000000000..7f1512b77f87 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/search-with-three-empty-queries-v2.json @@ -0,0 +1,44 @@ +{ + "queries": [ + { + "id": "4966dd79-2c7d-4ba9-8f90-c84aea7b5c49", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "streams": [], + "filters": [], + 
"search_types": [] + }, + { + "id": "0d5b45b8-1f55-4b60-ad34-d086ddd5d8fa", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "streams": [], + "filters": [], + "search_types": [] + }, + { + "id": "3eec6f5c-0f1b-41dc-bb95-3ebc6bb905f3", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/search-with-three-empty-queries.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/search-with-three-empty-queries.json new file mode 100644 index 000000000000..a4cf299a2e72 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/search-with-three-empty-queries.json @@ -0,0 +1,44 @@ +{ + "queries": [ + { + "id": "4966dd79-2c7d-4ba9-8f90-c84aea7b5c49", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "filter": null, + "filters": [], + "search_types": [] + }, + { + "id": "0d5b45b8-1f55-4b60-ad34-d086ddd5d8fa", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "filter": null, + "filters": [], + "search_types": [] + }, + { + "id": "3eec6f5c-0f1b-41dc-bb95-3ebc6bb905f3", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [] + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/startpage-save-search-request.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/startpage-save-search-request.json new file mode 100644 index 000000000000..8527a74369bb --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/startpage-save-search-request.json @@ -0,0 +1,31 @@ +{ + "id": "6141d457d3a6b9d73c8ac55b", + "queries": [ + { + "id": "278992f6-8930-43f5-9dd7-11bbc4e3b797", + "query": { + "type": "elasticsearch", + "query_string": "" + }, + "timerange": { + "type": "relative", + "from": 300 + }, + "search_types": [ + { + "size": 100, + "streams": [], + "fields": [ + "timestamp", + "source", + "message" + ], + "type": "logs", + "id": "967d2217-fd99-48a6-b829-5acdab906807", + "sort": "DESC" + } + ] + } + ], + "parameters": [] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/startpage-views-request.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/startpage-views-request.json new file mode 100644 index 000000000000..60cd1e8944b6 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/startpage-views-request.json @@ -0,0 +1,41 @@ +{ + "id": "6141d45bd3a6b9d73c8ac55c", + "type": "DASHBOARD", + "title": "test", + "search_id": "6141d457d3a6b9d73c8ac55b", + "state": { + "278992f6-8930-43f5-9dd7-11bbc4e3b797": { + "titles": {}, + "widgets": [ + { + "id": "34316a67-3dca-4994-a061-2fc6a9bf7f0b", + "type": "logs", + "config": { + "fields": [ + "timestamp", + "source", + "message" + ], + "size": 100, + "sort": "DESC" + }, + "streams": [] + } + ], + "widget_mapping": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": [ + "967d2217-fd99-48a6-b829-5acdab906807" + ] + }, + "positions": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": { + "col": 1, + "row": 1, + "height": 4, + "width": 12 + } + } + } + }, + "created_at": "2021-09-15T11:08:42.248Z" +} diff --git 
a/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request-invalid-search-type.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request-invalid-search-type.json new file mode 100644 index 000000000000..bb569f819428 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request-invalid-search-type.json @@ -0,0 +1,41 @@ +{ + "id": "6141d45bd3a6b9d73c8ac55b", + "type": "DASHBOARD", + "title": "test", + "search_id": "6141d457d3a6b9d73c8ac55c", + "state": { + "278992f6-8930-43f5-9dd7-11bbc4e3b797": { + "titles": {}, + "widgets": [ + { + "id": "34316a67-3dca-4994-a061-2fc6a9bf7f0b", + "type": "logs", + "config": { + "fields": [ + "timestamp", + "source", + "message" + ], + "size": 100, + "sort": "DESC" + }, + "streams": [] + } + ], + "widget_mapping": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": [ + "967d2217-fd99-48a6-b829-5acdab906808" + ] + }, + "positions": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": { + "col": 1, + "row": 1, + "height": 4, + "width": 12 + } + } + } + }, + "created_at": "2021-09-15T11:08:42.248Z" +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request-invalid-widgets.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request-invalid-widgets.json new file mode 100644 index 000000000000..0a1e58a91c05 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request-invalid-widgets.json @@ -0,0 +1,41 @@ +{ + "id": "6141d45bd3a6b9d73c8ac55b", + "type": "DASHBOARD", + "title": "test", + "search_id": "6141d457d3a6b9d73c8ac55a", + "state": { + "278992f6-8930-43f5-9dd7-11bbc4e3b797": { + "titles": {}, + "widgets": [ + { + "id": "34316a67-3dca-4994-a061-2fc6a9bf7f0b", + "type": "logs", + "config": { + "fields": [ + "timestamp", + "source", + "message" + ], + "size": 100, + "sort": "DESC" + }, + "streams": [] + } + ], + "widget_mapping": { + "4b9d4d9b-ded8-4952-8016-7340166dc234": [ + "967d2217-fd99-48a6-b829-5acdab906807" + ] + }, + "positions": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": { + "col": 1, + "row": 1, + "height": 4, + "width": 12 + } + } + } + }, + "created_at": "2021-09-15T11:08:42.248Z" +} diff --git a/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request.json b/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request.json new file mode 100644 index 000000000000..075b30c4f70d --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/plugins/views/views-request.json @@ -0,0 +1,41 @@ +{ + "id": "6141d45bd3a6b9d73c8ac55b", + "type": "DASHBOARD", + "title": "test", + "search_id": "6141d457d3a6b9d73c8ac55a", + "state": { + "278992f6-8930-43f5-9dd7-11bbc4e3b797": { + "titles": {}, + "widgets": [ + { + "id": "34316a67-3dca-4994-a061-2fc6a9bf7f0b", + "type": "logs", + "config": { + "fields": [ + "timestamp", + "source", + "message" + ], + "size": 100, + "sort": "DESC" + }, + "streams": [] + } + ], + "widget_mapping": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": [ + "967d2217-fd99-48a6-b829-5acdab906807" + ] + }, + "positions": { + "34316a67-3dca-4994-a061-2fc6a9bf7f0b": { + "col": 1, + "row": 1, + "height": 4, + "width": 12 + } + } + } + }, + "created_at": "2021-09-15T11:08:42.248Z" +} diff --git a/full-backend-tests/src/test/resources/org/graylog/testing/fullbackend/access-token.json b/full-backend-tests/src/test/resources/org/graylog/testing/fullbackend/access-token.json new file mode 100644 index 000000000000..f433fd6556e5 --- /dev/null +++ 
b/full-backend-tests/src/test/resources/org/graylog/testing/fullbackend/access-token.json @@ -0,0 +1,15 @@ +{ + "access_tokens": [ + { + "_id": { + "$oid": "54e3deadbeefdeadbeef0001" + }, + "last_access": { + "$date": "2020-02-26T21:50:12.454Z" + }, + "token": "1f56ccn5hpokf9f8d91pn0j7tqlv7552f92jt6i4lnb3u7b5osjq", + "username": "admin", + "NAME": "test" + } + ] +} diff --git a/full-backend-tests/src/test/resources/org/graylog/testing/fullbackend/one-message.json b/full-backend-tests/src/test/resources/org/graylog/testing/fullbackend/one-message.json new file mode 100644 index 000000000000..bf1e3340dec9 --- /dev/null +++ b/full-backend-tests/src/test/resources/org/graylog/testing/fullbackend/one-message.json @@ -0,0 +1,25 @@ +{ + "documents": [ + { + "document": [ + { + "index": { + "indexName": "graylog_0", + "indexType": "message", + "indexId": "0" + } + }, + { + "data": { + "source": "affenmann.info", + "message": "hello from es fixture", + "timestamp": "2015-01-01 01:00:00.000", + "streams": [ + "000000000000000000000001" + ] + } + } + ] + } + ] +} diff --git a/graylog-plugin-archetype/LICENSE b/graylog-plugin-archetype/LICENSE deleted file mode 100644 index b9c3495cb409..000000000000 --- a/graylog-plugin-archetype/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ -GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. 
For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. 
- - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
- - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. 
- - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. 
- - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - A Maven archetype to bootstrap a plugin project for Graylog. - Copyright (C) 2014-15 TORCH GmbH, 2015 Graylog, Inc. - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Graylog Plugin Maven Archetype, Copyright (C) 2014-2015 TORCH GmbH, 2015 Graylog, Inc. - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/graylog-plugin-archetype/README.md b/graylog-plugin-archetype/README.md index a3729c43ce4d..122f89a19277 100644 --- a/graylog-plugin-archetype/README.md +++ b/graylog-plugin-archetype/README.md @@ -1,7 +1,7 @@ Graylog Plugin Maven Archetype ============================== -See our latest documentation on [writing plugins](http://docs.graylog.org/en/latest/pages/plugins.html). +See our latest documentation on [writing plugins](https://docs.graylog.org/docs/plugins). ## Creating a new plugin project diff --git a/graylog-plugin-archetype/pom.xml b/graylog-plugin-archetype/pom.xml index fa925f078d81..2144a8ae8142 100644 --- a/graylog-plugin-archetype/pom.xml +++ b/graylog-plugin-archetype/pom.xml @@ -1,28 +1,28 @@ + + Copyright (C) 2020 Graylog, Inc. 
+ + This program is free software: you can redistribute it and/or modify + it under the terms of the Server Side Public License, version 1, + as published by MongoDB, Inc. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + Server Side Public License for more details. + + You should have received a copy of the Server Side Public License + along with this program. If not, see + . + +--> 4.0.0 org.graylog graylog-project-parent - 3.0.0-beta.2-SNAPSHOT + 6.2.0-SNAPSHOT ../graylog-project-parent @@ -46,7 +46,7 @@ org.apache.maven.archetype archetype-packaging - 3.0.1 + 3.3.1 @@ -77,31 +77,6 @@ B1606F22 - - com.mycila - license-maven-plugin - -
com/mycila/maven/plugin/license/templates/GPL-3.txt
- - ${project.organization.name} - Graylog - - - **/src/main/java/** - **/src/test/java/** - - - **/src/main/resources/** - -
- - - - check - - - -
@@ -109,12 +84,12 @@ org.apache.maven.plugins maven-archetype-plugin - 3.0.1 + 3.3.1 org.apache.maven.plugins maven-resources-plugin - 3.1.0 + 3.3.1 diff --git a/graylog-plugin-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml b/graylog-plugin-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml index 4d89b0a3124e..24abc43bef19 100644 --- a/graylog-plugin-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml +++ b/graylog-plugin-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml @@ -64,11 +64,17 @@ + + .mvn + + **/* + + + .gitignore - .travis.yml README.md GETTING-STARTED.md package.json diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/.eslintrc b/graylog-plugin-archetype/src/main/resources/archetype-resources/.eslintrc deleted file mode 100644 index 7544c5e29a27..000000000000 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/.eslintrc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "parser": "babel-eslint", - "ecmaFeatures": { - "classes": true, - "jsx": true, - }, - "extends": [ - "graylog", - ], -} diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/.mvn/jvm.config b/graylog-plugin-archetype/src/main/resources/archetype-resources/.mvn/jvm.config new file mode 100644 index 000000000000..32599cefea51 --- /dev/null +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/.mvn/jvm.config @@ -0,0 +1,10 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/.travis.yml b/graylog-plugin-archetype/src/main/resources/archetype-resources/.travis.yml deleted file mode 100644 index 574c2ec6e3b1..000000000000 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/.travis.yml +++ /dev/null @@ -1,27 +0,0 @@ -sudo: required -dist: trusty -language: java -jdk: - - oraclejdk8 -addons: - apt: - packages: - - rpm -before_deploy: - - mvn jdeb:jdeb && export RELEASE_DEB_FILE=$(ls target/*.deb) - - mvn rpm:rpm && export RELEASE_RPM_FILE=$(find target/ -name '*.rpm' | tail -1) - - rm -f target/original-*.jar - - export RELEASE_PKG_FILE=$(ls target/*.jar) - - echo "Deploying release to GitHub releases" -deploy: - provider: releases - api_key: - secure: - file: - - "${RELEASE_PKG_FILE}" - - "${RELEASE_DEB_FILE}" - - "${RELEASE_RPM_FILE}" - skip_cleanup: true - on: - tags: true - jdk: oraclejdk8 diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/GETTING-STARTED.md b/graylog-plugin-archetype/src/main/resources/archetype-resources/GETTING-STARTED.md index dd82c9dbe190..23880c9965fe 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/GETTING-STARTED.md +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/GETTING-STARTED.md @@ -3,24 +3,6 @@ Getting started with your new Graylog plugin Welcome to your new Graylog plugin! 
-Please refer to http://docs.graylog.org/en/latest/pages/plugins.html for documentation on how to write +Please refer to https://docs.graylog.org/docs/plugins for documentation on how to write plugins for Graylog. -Travis CI ---------- - -There is a `.travis.yml` template in this project which is prepared to automatically -deploy the plugin artifacts (JAR, DEB, RPM) to GitHub releases. - -You just have to add your encrypted GitHub access token to the `.travis.yml`. -The token can be generated in your [GitHub personal access token settings](https://github.com/settings/tokens). - -Before Travis CI works, you have to enable it. Install the Travis CI command line -application and execute `travis enable`. - -To encrypt your GitHub access token you can use `travis encrypt`. - -Alternatively you can use `travis setup -f releases` to automatically create a GitHub -access token and add it to the `.travis.yml` file. **Attention:** doing this -will replace some parts of the `.travis.yml` file and you have to restore previous -settings. diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/README.md b/graylog-plugin-archetype/src/main/resources/archetype-resources/README.md index bd5395a852ea..efbc27c9fec7 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/README.md +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/README.md @@ -1,7 +1,5 @@ # ${pluginClassName} Plugin for Graylog -[![Build Status](https://travis-ci.org/${githubRepo}.svg?branch=master)](https://travis-ci.org/${githubRepo}) - __Use this paragraph to enter a description of your plugin.__ **Required Graylog version:** 2.0 and later @@ -55,4 +53,4 @@ $ mvn release:prepare $ mvn release:perform ``` -This sets the version numbers, creates a tag and pushes to GitHub. Travis CI will build the release artifacts and upload to GitHub automatically. +This sets the version numbers, creates a tag and pushes to GitHub. 
diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/package.json b/graylog-plugin-archetype/src/main/resources/archetype-resources/package.json index ce4d7ebe470d..9848d2ff479e 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/package.json +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/package.json @@ -8,7 +8,8 @@ }, "scripts": { "build": "webpack", - "lint": "eslint -c .eslintrc src/**/*", + "lint": "eslint src", + "lint:path": "eslint", "test": "jest" }, "keywords": [ @@ -16,6 +17,9 @@ ], "author": "${ownerName} <${ownerEmail}>", "license": "MIT", + "eslintConfig": { + "extends": "graylog" + }, "dependencies": { }, "devDependencies": { diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/pom.xml b/graylog-plugin-archetype/src/main/resources/archetype-resources/pom.xml index a1126204b61c..f3f69cafebd3 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/pom.xml +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/pom.xml @@ -1,4 +1,22 @@ + @@ -36,8 +54,8 @@ UTF-8 - 1.8 - 1.8 + 17 + 17 true @@ -60,7 +78,7 @@ - + sonatype-nexus-snapshots Sonatype Nexus Snapshots @@ -92,11 +110,19 @@ ${graylog.version} provided + + + org.graylog2 + graylog2-server + ${graylog.version} + test-jar + test + - build + ${web.build-dir} src/main/resources true @@ -236,7 +262,7 @@ ${nodejs.version} - ${yarn.version} + ${yarn.version} diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__.java b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__.java index 59311200601a..05fcc8b3a8fa 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__.java +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__.java @@ -1,3 +1,19 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ package ${package}; /** diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__MetaData.java b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__MetaData.java index a7e1178544ff..ff9329d2d0a7 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__MetaData.java +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__MetaData.java @@ -1,3 +1,19 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ package ${package}; import org.graylog2.plugin.PluginMetaData; diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Module.java b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Module.java index 3eef189b6c61..1569fba38dd8 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Module.java +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Module.java @@ -1,3 +1,19 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ package ${package}; import org.graylog2.plugin.PluginConfigBean; diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Plugin.java b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Plugin.java index 13e66f8df3f9..54fc5bdb6433 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Plugin.java +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/src/main/java/__pluginClassName__Plugin.java @@ -1,3 +1,19 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ package ${package}; import org.graylog2.plugin.Plugin; diff --git a/graylog-plugin-archetype/src/main/resources/archetype-resources/webpack.config.js b/graylog-plugin-archetype/src/main/resources/archetype-resources/webpack.config.js index 2286642b901e..d409a6478bd3 100644 --- a/graylog-plugin-archetype/src/main/resources/archetype-resources/webpack.config.js +++ b/graylog-plugin-archetype/src/main/resources/archetype-resources/webpack.config.js @@ -1,8 +1,8 @@ -const PluginWebpackConfig = require('graylog-web-plugin').PluginWebpackConfig; -const loadBuildConfig = require('graylog-web-plugin').loadBuildConfig; const path = require('path'); +const { PluginWebpackConfig } = require('graylog-web-plugin'); +const { loadBuildConfig } = require('graylog-web-plugin'); // Remember to use the same name here and in `getUniqueId()` in the java MetaData class -module.exports = new PluginWebpackConfig('${package}.${pluginClassName}Plugin', loadBuildConfig(path.resolve(__dirname, './build.config')), { +module.exports = new PluginWebpackConfig(__dirname, '${package}.${pluginClassName}Plugin', loadBuildConfig(path.resolve(__dirname, './build.config')), { // Here goes your additional webpack configuration. }); diff --git a/graylog-plugin-parent/graylog-plugin-web-parent/package.json b/graylog-plugin-parent/graylog-plugin-web-parent/package.json index 30e40525350a..ef05f3daef25 100644 --- a/graylog-plugin-parent/graylog-plugin-web-parent/package.json +++ b/graylog-plugin-parent/graylog-plugin-web-parent/package.json @@ -3,12 +3,13 @@ "version": "1.0.0", "description": "Dummy package.json for graylog-plugin-web-parent", "author": "Graylog, Inc.", - "license": "GPL-3.0", + "license": "SSPL-1.0", "repository": { "type": "git", "url": "git://github.com/Graylog2/graylog2-server.git" }, "scripts": { - "build": "echo 'Dummy build script for graylog-plugin-web-parent'" + "build": "echo 'Dummy build script for graylog-plugin-web-parent'", + "generate:apidefs": "echo 'Dummy Backend API generation script for graylog-plugin-web-parent'" } } diff --git a/graylog-plugin-parent/graylog-plugin-web-parent/pom.xml b/graylog-plugin-parent/graylog-plugin-web-parent/pom.xml index 47f76a89de15..0f5a29b9f02c 100644 --- a/graylog-plugin-parent/graylog-plugin-web-parent/pom.xml +++ b/graylog-plugin-parent/graylog-plugin-web-parent/pom.xml @@ -1,28 +1,28 @@ + + Copyright (C) 2020 Graylog, Inc. + + This program is free software: you can redistribute it and/or modify + it under the terms of the Server Side Public License, version 1, + as published by MongoDB, Inc. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + Server Side Public License for more details. + + You should have received a copy of the Server Side Public License + along with this program. If not, see + . 
+ +--> 4.0.0 org.graylog.plugins graylog-plugin-parent - 3.0.0-beta.2-SNAPSHOT + 6.2.0-SNAPSHOT graylog-plugin-web-parent @@ -40,12 +40,28 @@ + + org.apache.maven.plugins + maven-clean-plugin + + + + ${basedir} + + node_modules/**/* + + false + + + + com.github.eirslett frontend-maven-plugin ${nodejs.version} ${yarn.version} + https://graylog-ci-cache.s3.eu-west-1.amazonaws.com/downloads/node/ @@ -57,6 +73,7 @@ yarn install + compile yarn @@ -65,9 +82,19 @@ install - + + Generate backend API types + compile + + yarn + + + generate:apidefs + + yarn run build + compile yarn @@ -77,6 +104,25 @@ + + org.apache.maven.plugins + maven-resources-plugin + + + copy-web-ui-resources + process-classes + + copy-resources + + + ${project.build.outputDirectory} + + ${web.build-dir} + + + + + diff --git a/graylog-plugin-parent/pom.xml b/graylog-plugin-parent/pom.xml index b2db610c48c6..f2c319bdac92 100644 --- a/graylog-plugin-parent/pom.xml +++ b/graylog-plugin-parent/pom.xml @@ -1,22 +1,22 @@ + + Copyright (C) 2020 Graylog, Inc. + + This program is free software: you can redistribute it and/or modify + it under the terms of the Server Side Public License, version 1, + as published by MongoDB, Inc. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + Server Side Public License for more details. + + You should have received a copy of the Server Side Public License + along with this program. If not, see + . + +--> 4.0.0 @@ -26,7 +26,7 @@ org.graylog graylog-parent - 3.0.0-beta.2-SNAPSHOT + 6.2.0-SNAPSHOT org.graylog.plugins @@ -37,12 +37,10 @@ /usr/share/graylog-server/plugin - - - 1.0-rc4 + ${project.build.directory}/web/build - 1.6 + 1.13 2.2.0 @@ -68,7 +66,7 @@ com.google.auto.value - auto-value + auto-value-annotations ${auto-value.version} provided @@ -92,38 +90,6 @@ - - org.apache.maven.plugins - maven-clean-plugin - - - - ${basedir} - - build/**/* - node_modules/**/* - - false - - - - - - - com.mycila - license-maven-plugin - - true - - - - - check - - - - - jdeb org.vafer diff --git a/graylog-project-parent/pom.xml b/graylog-project-parent/pom.xml index 44d17e9df8b5..d73e8e04c7b7 100644 --- a/graylog-project-parent/pom.xml +++ b/graylog-project-parent/pom.xml @@ -1,33 +1,33 @@ + + Copyright (C) 2020 Graylog, Inc. + + This program is free software: you can redistribute it and/or modify + it under the terms of the Server Side Public License, version 1, + as published by MongoDB, Inc. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + Server Side Public License for more details. + + You should have received a copy of the Server Side Public License + along with this program. If not, see + . 
+ +--> 4.0.0 ../graylog2-server - ../integration-tests + ../full-backend-tests org.graylog graylog-parent - 3.0.0-beta.2-SNAPSHOT + 6.2.0-SNAPSHOT graylog-project-parent @@ -53,13 +53,6 @@ pom import - - com.fasterxml.jackson - jackson-bom - ${jackson.version} - pom - import - org.glassfish.jersey jersey-bom @@ -67,25 +60,6 @@ pom import - - io.netty - netty-bom - ${netty.version} - pom - import - - - io.netty - netty-tcnative-boringssl-static - ${netty-tcnative-boringssl-static.version} - osx-x86_64 - - - io.netty - netty-tcnative-boringssl-static - ${netty-tcnative-boringssl-static.version} - linux-x86_64 - com.google.guava @@ -97,11 +71,6 @@ guava-retrying ${guava-retrying.version} - - joda-time - joda-time - ${joda-time.version} - com.github.zafarkhaja java-semver @@ -112,84 +81,28 @@ semver4j ${semver4j.version} - - org.mongodb - mongodb-driver - ${mongodb-driver.version} + com.github.ben-manes.caffeine + caffeine + ${caffeine.version} - org.mongojack - mongojack - ${mongojack.version} + org.cryptomator + siv-mode + ${siv-mode.version} - - org.elasticsearch - elasticsearch - ${elasticsearch.version} - - - org.graylog.jest - jest - ${jest.version} - - - commons-logging - commons-logging - - - - - io.dropwizard.metrics - metrics-core - ${metrics.version} - - - io.dropwizard.metrics - metrics-annotation - ${metrics.version} - - - io.dropwizard.metrics - metrics-graphite - ${metrics.version} - - - io.dropwizard.metrics - metrics-log4j2 - ${metrics.version} - - - io.dropwizard.metrics - metrics-jvm - ${metrics.version} - - - io.dropwizard.metrics - metrics-jmx - ${metrics.version} - - - io.dropwizard.metrics - metrics-json - ${metrics.version} + io.prometheus + simpleclient_bom + ${prometheus-client.version} + pom + import org.reflections reflections ${reflections.version} - - com.jayway.jsonpath - json-path - ${json-path.version} - - - com.squareup.okhttp3 - okhttp - ${okhttp.version} - org.graylog2 @@ -199,15 +112,15 @@ - com.github.joschi + org.graylog jadconfig ${jadconfig.version} - javax.ws.rs - javax.ws.rs-api - ${javax.ws.rs-api.version} + jakarta.ws.rs + jakarta.ws.rs-api + ${jakarta.ws.rs-api.version} org.hibernate.validator @@ -215,30 +128,6 @@ ${hibernate-validator.version} - - - org.apache.logging.log4j - log4j-bom - ${log4j.version} - pom - import - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.slf4j - jcl-over-slf4j - ${slf4j.version} - - - org.slf4j - log4j-over-slf4j - ${slf4j.version} - - commons-io commons-io @@ -250,21 +139,23 @@ ${commons-codec.version} + + io.github.classgraph + classgraph + ${classgraph.version} + + org.apache.directory.api api-all ${apache-directory-version} + test - org.scala-lang - scala-library - ${scala.version} - - - org.apache.kafka - kafka_2.11 - ${kafka.version} + org.graylog.shaded + kafka09_2.11 + ${kafka09.version} com.sun.jmx @@ -288,33 +179,10 @@ - - org.apache.zookeeper - zookeeper - ${zookeeper.version} - - - - org.slf4j - slf4j-log4j12 - - - log4j - log4j - - - - - com.101tec - zkclient - ${zkclient.version} - - - log4j - log4j - - + org.apache.kafka + kafka-clients + ${kafka.version} @@ -323,12 +191,6 @@ ${amqp-client.version} - - com.google.protobuf - protobuf-java - ${protobuf.version} - - com.lmax disruptor @@ -336,10 +198,15 @@ - com.eaio.uuid + org.graylog2.repackaged uuid ${uuid.version} + + de.huxhorn.sulky + de.huxhorn.sulky.ulid + ${ulid.version} + org.apache.shiro @@ -353,6 +220,18 @@ ${commons-email.version} + + commons-validator + commons-validator + ${commons-validator.version} + + + commons-logging + 
commons-logging + + + + net.sf.opencsv opencsv @@ -376,18 +255,31 @@ com.floreysoft jmte ${jmte.version} + + + + asm + asm + + + + + + jakarta.annotation + jakarta.annotation-api + ${jakarta.annotation-api.version} - javax.annotation - javax.annotation-api - ${javax.annotation-api.version} + jakarta.inject + jakarta.inject-api + ${jakarta.inject.version} - javax.inject - javax.inject - ${javax.inject.version} + javax.xml.bind + jaxb-api + ${jaxb-api.version} @@ -419,36 +311,13 @@ ${HdrHistogram.version} - com.google.auto.value - auto-value-annotations - ${auto-value.version} + com.github.oshi + oshi-core + ${oshi.version} - com.google.auto.value - auto-value - ${auto-value.version} - provided - - - org.graylog.autovalue - auto-value-javabean - ${auto-value-javabean.version} - provided - - - org.fusesource - sigar - ${sigar.version} - - - log4j - log4j - - - - - io.krakens - java-grok + org.graylog2.repackaged + grok ${grok.version} @@ -456,11 +325,6 @@ swagger-annotations ${swagger.version} - - javax.el - javax.el-api - ${javax.el-api.version} - com.squareup.retrofit2 retrofit @@ -477,8 +341,8 @@ ${jbcrypt.version} - javax.validation - validation-api + jakarta.validation + jakarta.validation-api ${validation-api.version} @@ -498,19 +362,59 @@ org.jooq - jool-java-8 + jool ${jool.version} - - com.squareup - javapoet - ${javapoet.version} - org.freemarker freemarker ${freemarker.version} + + one.util + streamex + ${streamex.version} + + + info.leadinglight + jdot + ${jdot.version} + + + com.unboundid + unboundid-ldapsdk + ${unboundid-ldap.version} + + + com.github.stateless4j + stateless4j + ${stateless4j.version} + + + + + io.opentelemetry + opentelemetry-bom + ${opentelemetry.version} + pom + import + + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + ${opentelemetry.version} + + + software.amazon.msk + aws-msk-iam-auth + ${aws-msk-iam-auth.version} + + + commons-logging + commons-logging + + + @@ -532,19 +436,7 @@ test - com.revinate - assertj-json - ${assertj-json.version} - test - - - org.mockito - mockito-core - ${mockito.version} - test - - - com.jayway.restassured + io.rest-assured rest-assured ${restassured.version} test @@ -556,28 +448,17 @@ - com.jayway.restassured + io.rest-assured json-path ${restassured.version} test - com.lordofthejars - nosqlunit-elasticsearch2 - ${nosqlunit.version} - test - - - com.lordofthejars - nosqlunit-mongodb - ${nosqlunit.version} - test - - - com.github.joschi.nosqlunit - nosqlunit-elasticsearch-http - ${nosqlunit-elasticsearch-http.version} - test + org.testcontainers + testcontainers-bom + ${testcontainers.version} + pom + import org.jukito @@ -585,18 +466,6 @@ ${jukito.version} test - - com.github.fakemongo - fongo - ${fongo.version} - test - - - org.awaitility - awaitility - ${awaitility.version} - test - nl.jqno.equalsverifier equalsverifier @@ -614,6 +483,14 @@ org.apache.directory.api api-ldap-schema-data + + org.bouncycastle + bcprov-jdk15on + + + org.bouncycastle + bcpkix-jdk15on + @@ -647,6 +524,12 @@ ${pkts.version} test + + com.cronutils + cron-utils + ${cron-utils.version} + + @@ -656,7 +539,7 @@ com.mycila license-maven-plugin - 2.11 + ${license-maven.version} @@ -667,7 +550,7 @@ 2 true - -Djava.library.path=${project.basedir}/../lib/sigar-${sigar.version} -Dio.netty.leakDetectionLevel=paranoid -Djava.awt.headless=true + -Dio.netty.leakDetectionLevel=paranoid -Djava.awt.headless=true **/*IntegrationTest.java **/*IT.java @@ -679,44 +562,18 @@ - de.thetaphi - forbiddenapis + org.apache.maven.plugins + 
maven-failsafe-plugin - - org.graylog2.shared.SuppressForbidden - - - false - - false - true - - - jdk-unsafe - jdk-deprecated - jdk-reflection - - commons-io-unsafe-2.5 - - - - ${project.basedir}/../config/forbidden-apis/netty3.txt - ${project.basedir}/../config/forbidden-apis/signatures.txt - + 1 + false + -Djava.awt.headless=true - forbidden-apis-src - compile - check - - - - forbidden-apis-test - test-compile - - testCheck + integration-test + verify @@ -724,27 +581,6 @@ com.mycila license-maven-plugin - -
com/mycila/maven/plugin/license/templates/GPL-3.txt
- - ${project.organization.name} - Graylog - - - **/src/main/java/** - **/src/test/java/** - - - graylog2-web-interface/plugin/** - -
- - - - check - - -
@@ -781,12 +617,12 @@ com.h3xstream.findsecbugs findsecbugs-plugin - 1.7.1 + 1.13.0 com.mebigfatguy.fb-contrib fb-contrib - 7.4.3 + 7.6.9 @@ -803,14 +639,23 @@ maven-pmd-plugin ${maven.compiler.target} - false + 1 + true + false ${project.build.directory}/generated-sources ${project.build.directory}/generated-test-sources + + /rulesets/java/maven-pmd-plugin-default.xml + ../config/pmd-rules.xml + + + validate + check @@ -820,25 +665,5 @@ - - travis - - - env.TRAVIS - true - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - 2 - - - - -
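As a hedged illustration of what the maven-failsafe-plugin configuration added above will run — assuming JUnit 5 on the test classpath and using a hypothetical package and class name — an integration test only has to follow the `*IT.java` naming convention that the surefire excludes skip and that failsafe picks up by default during the `integration-test`/`verify` phases:

```java
// Hypothetical sketch: a minimal class matching the *IT.java pattern that
// maven-failsafe executes while maven-surefire leaves it out of the unit-test run.
package org.example.integration;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertTrue;

class MessageRoundTripIT {

    @Test
    void runsOnlyDuringIntegrationTestPhase() {
        // A real test in this project would typically start external services
        // (for example via the Testcontainers dependencies managed in this parent POM)
        // and exercise the running system end to end; this stub only shows the
        // naming convention and build lifecycle wiring.
        assertTrue(true);
    }
}
```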
diff --git a/graylog-storage-elasticsearch7/assembly.xml b/graylog-storage-elasticsearch7/assembly.xml new file mode 100644 index 000000000000..90409aa90f5e --- /dev/null +++ b/graylog-storage-elasticsearch7/assembly.xml @@ -0,0 +1,24 @@ + + + with-dependencies + + jar + + false + + + / + true + runtime + true + true + + org.graylog:graylog-storage-elasticsearch7 + org.graylog.shaded:elasticsearch7 + org.graylog.shaded:elasticsearch-rest-client-sniffer7 + + + + diff --git a/graylog-storage-elasticsearch7/pom.xml b/graylog-storage-elasticsearch7/pom.xml new file mode 100644 index 000000000000..72d5a026537d --- /dev/null +++ b/graylog-storage-elasticsearch7/pom.xml @@ -0,0 +1,202 @@ + + + + + org.graylog.plugins + graylog-plugin-parent + 6.2.0-SNAPSHOT + ../graylog-plugin-parent + + 4.0.0 + jar + + org.graylog + graylog-storage-elasticsearch7 + graylog-storage-elasticsearch7 + Graylog Storage Module for Elasticsearch 7 + + + 7.9.1-0 + true + + + + + org.graylog2 + graylog2-server + ${project.parent.version} + + + com.google.guava + guava + ${guava.version} + provided + + + org.graylog.shaded + elasticsearch7 + ${elasticsearch.version} + + + org.graylog.shaded + elasticsearch-rest-client-sniffer7 + ${elasticsearch.version} + + + + org.graylog2 + graylog2-server + ${project.parent.version} + test-jar + test + + + junit + junit + ${junit.version} + test + + + org.mockito + mockito-core + test + + + org.assertj + assertj-core + ${assertj-core.version} + test + + + org.assertj + assertj-joda-time + ${assertj-joda-time.version} + test + + + org.testcontainers + elasticsearch + ${testcontainers.version} + test + + + org.junit.jupiter + junit-jupiter + ${junit-jupiter.version} + test + + + org.junit.vintage + junit-vintage-engine + ${junit-jupiter.version} + test + + + + + + + src/test/resources + true + + **/*.properties + + + + src/test/resources + false + + **/*.properties + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + assembly.xml + + + ${project.groupId}.${project.artifactId} + + + + + + package + + single + + + + + + + + + release + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + true + + + + attach-javadocs + + jar + + + + + + + + + diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ChunkedQueryResultES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ChunkedQueryResultES7.java new file mode 100644 index 000000000000..6abebe50d082 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ChunkedQueryResultES7.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.collect.Streams; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog2.indexer.results.ChunkedQueryResult; +import org.graylog2.indexer.results.ResultMessage; +import org.graylog2.indexer.results.ResultMessageFactory; + +import java.util.List; +import java.util.stream.Collectors; + +public abstract class ChunkedQueryResultES7 extends ChunkedQueryResult { + + private final ResultMessageFactory resultMessageFactory; + + public ChunkedQueryResultES7(ResultMessageFactory resultMessageFactory, ElasticsearchClient client, + SearchResponse initialResult, String query, List fields, int limit) { + super(client, initialResult, query, fields, limit); + this.resultMessageFactory = resultMessageFactory; + } + + @Override + protected List collectMessagesFromResult(SearchResponse response) { + return Streams.stream(response.getHits()) + .map(hit -> resultMessageFactory.parseFromSource(hit.getId(), hit.getIndex(), hit.getSourceAsMap())) + .collect(Collectors.toList()); + } + + @Override + protected long countTotalHits(SearchResponse response) { + return response.getHits().getTotalHits().value; + } + + @Override + protected long getTookMillisFromResponse(SearchResponse response) { + return response.getTook().getMillis(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ClusterAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ClusterAdapterES7.java new file mode 100644 index 000000000000..ea126da38819 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ClusterAdapterES7.java @@ -0,0 +1,346 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.github.joschi.jadconfig.util.Duration; +import com.google.common.base.Strings; +import com.google.common.collect.Lists; +import com.google.common.primitives.Ints; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.unit.TimeValue; +import org.graylog.storage.elasticsearch7.cat.CatApi; +import org.graylog.storage.elasticsearch7.cat.IndexSummaryResponse; +import org.graylog.storage.elasticsearch7.cat.NodeResponse; +import org.graylog2.indexer.ElasticsearchException; +import org.graylog2.indexer.cluster.ClusterAdapter; +import org.graylog2.indexer.cluster.PendingTasksStats; +import org.graylog2.indexer.cluster.health.ClusterAllocationDiskSettings; +import org.graylog2.indexer.cluster.health.ClusterAllocationDiskSettingsFactory; +import org.graylog2.indexer.cluster.health.NodeDiskUsageStats; +import org.graylog2.indexer.cluster.health.NodeFileDescriptorStats; +import org.graylog2.indexer.indices.HealthStatus; +import org.graylog2.rest.models.system.indexer.responses.ClusterHealth; +import org.graylog2.system.stats.elasticsearch.ClusterStats; +import org.graylog2.system.stats.elasticsearch.IndicesStats; +import org.graylog2.system.stats.elasticsearch.NodeInfo; +import org.graylog2.system.stats.elasticsearch.NodesStats; +import org.graylog2.system.stats.elasticsearch.ShardStats; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; +import jakarta.inject.Named; + +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +public class ClusterAdapterES7 implements ClusterAdapter { + private static final Logger LOG = LoggerFactory.getLogger(ClusterAdapterES7.class); + private final ElasticsearchClient client; + private final Duration requestTimeout; + private final CatApi catApi; + private final PlainJsonApi jsonApi; + + @Inject + public ClusterAdapterES7(ElasticsearchClient client, + @Named("elasticsearch_socket_timeout") Duration requestTimeout, + CatApi catApi, + PlainJsonApi jsonApi) { + this.client = client; + this.requestTimeout = requestTimeout; + this.catApi = catApi; + this.jsonApi = jsonApi; + } + + @Override + public Optional health() { + return clusterHealth().map(response -> healthStatusFrom(response.getStatus())); + } + + private HealthStatus healthStatusFrom(ClusterHealthStatus status) { + switch (status) { + case RED: + return HealthStatus.Red; + case YELLOW: + return HealthStatus.Yellow; + case GREEN: + return HealthStatus.Green; + } + + throw new 
IllegalStateException("Invalid health status received: " + status); + } + + @Override + public Set fileDescriptorStats() { + final List result = nodes(); + return result.stream() + .map(node -> NodeFileDescriptorStats.create(node.name(), node.ip(), node.host(), node.fileDescriptorMax())) + .collect(Collectors.toSet()); + } + + private List nodes() { + final List allNodes = catApi.nodes(); + final List nodesWithDiskStatistics = allNodes.stream().filter(NodeResponse::hasDiskStatistics).collect(Collectors.toList()); + if (allNodes.size() != nodesWithDiskStatistics.size()) { + final List nodesWithMissingDiskStatistics = allNodes.stream().filter(nr -> !nr.hasDiskStatistics()).collect(Collectors.toList()); + LOG.info("_cat/nodes API has returned " + nodesWithMissingDiskStatistics.size() + " nodes without disk statistics:"); + nodesWithMissingDiskStatistics.forEach(node -> LOG.info(node.toString())); + } + return nodesWithDiskStatistics; + } + + @Override + public Set diskUsageStats() { + final List result = nodes(); + return result.stream() + .map(node -> NodeDiskUsageStats.create(node.name(), node.role(), node.ip(), node.host(), node.diskUsed(), node.diskTotal(), node.diskUsedPercent())) + .collect(Collectors.toSet()); + } + + @Override + public ClusterAllocationDiskSettings clusterAllocationDiskSettings() { + final ClusterGetSettingsRequest request = new ClusterGetSettingsRequest(); + request.includeDefaults(true); + + final ClusterGetSettingsResponse response = client.execute((c, requestOptions) -> c.cluster().getSettings(request, requestOptions)); + return ClusterAllocationDiskSettingsFactory.create( + Boolean.parseBoolean(response.getSetting("cluster.routing.allocation.disk.threshold_enabled")), + response.getSetting("cluster.routing.allocation.disk.watermark.low"), + response.getSetting("cluster.routing.allocation.disk.watermark.high"), + response.getSetting("cluster.routing.allocation.disk.watermark.flood_stage") + ); + } + + @Override + public Optional nodeIdToName(String nodeId) { + return nodeById(nodeId) + .map(jsonNode -> jsonNode.get("name").asText()); + } + + @Override + public Optional nodeIdToHostName(String nodeId) { + return nodeById(nodeId) + .map(jsonNode -> jsonNode.path("host")) + .filter(host -> !host.isMissingNode()) + .map(JsonNode::asText); + } + + private Optional nodeById(String nodeId) { + if (Strings.isNullOrEmpty(nodeId)) { + return Optional.empty(); + } + final Request request = new Request("GET", "/_nodes/" + nodeId); + return Optional.of(jsonApi.perform(request, "Unable to retrieve node information for node id " + nodeId)) + .map(jsonNode -> jsonNode.path("nodes").path(nodeId)) + .filter(node -> !node.isMissingNode()); + } + + @Override + public boolean isConnected() { + final ClusterHealthRequest request = new ClusterHealthRequest() + .timeout(new TimeValue(requestTimeout.getQuantity(), requestTimeout.getUnit())) + .local(true); + try { + final ClusterHealthResponse result = client.execute((c, requestOptions) -> c.cluster().health(request, requestOptions)); + return result.getNumberOfDataNodes() > 0; + } catch (org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException e) { + LOG.error("Check for connectivity failed with exception '{}' - enable debug level for this class to see the stack trace.", e.getMessage()); + if (LOG.isDebugEnabled()) { + LOG.error(e.getMessage(), e); + } + return false; + } + } + + @Override + public Optional clusterName() { + return clusterHealth().map(ClusterHealthResponse::getClusterName); + } + + @Override + public 
Optional clusterHealthStats() { + return clusterHealth() + .map(this::clusterHealthFrom); + } + + private ClusterHealth clusterHealthFrom(ClusterHealthResponse response) { + return ClusterHealth.create(response.getStatus().toString().toLowerCase(Locale.ENGLISH), + ClusterHealth.ShardStatus.create( + response.getActiveShards(), + response.getInitializingShards(), + response.getRelocatingShards(), + response.getUnassignedShards() + ) + ); + } + + @Override + public PendingTasksStats pendingTasks() { + final Request request = new Request("GET", "/_cluster/pending_tasks"); + + final JsonNode response = jsonApi.perform(request, "Couldn't read Elasticsearch pending cluster tasks"); + + final JsonNode pendingClusterTasks = response.path("tasks"); + final int pendingTasksSize = pendingClusterTasks.size(); + final List pendingTasksTimeInQueue = Lists.newArrayListWithCapacity(pendingTasksSize); + for (JsonNode jsonElement : pendingClusterTasks) { + if (jsonElement.has("time_in_queue_millis")) { + pendingTasksTimeInQueue.add(jsonElement.get("time_in_queue_millis").asLong()); + } + } + + return PendingTasksStats.create(pendingTasksSize, pendingTasksTimeInQueue); + } + + @Override + public ClusterStats clusterStats() { + final JsonNode clusterStatsResponseJson = rawClusterStats(); + final String clusterName = clusterStatsResponseJson.path("cluster_name").asText(); + + String clusterVersion = null; + if (clusterStatsResponseJson.path("nodes").path("versions").isArray()) { + final ArrayNode versions = (ArrayNode) clusterStatsResponseJson.path("nodes").path("versions"); + // We just use the first version in the "versions" array. This is not correct if there are different + // versions running in the cluster, but that is not recommended anyway. + final JsonNode versionNode = versions.path(0); + if (versionNode.getNodeType() != JsonNodeType.MISSING) { + clusterVersion = versionNode.asText(); + } + } + + final JsonNode countStats = clusterStatsResponseJson.path("nodes").path("count"); + + final NodesStats nodesStats = NodesStats.create( + countStats.path("total").asInt(-1), + countStats.path("master_only").asInt(-1), + countStats.path("data_only").asInt(-1), + countStats.path("master_data").asInt(-1), + countStats.path("client").asInt(-1) + ); + + final JsonNode clusterIndicesStats = clusterStatsResponseJson.path("indices"); + final IndicesStats indicesStats = IndicesStats.create( + clusterIndicesStats.path("count").asInt(-1), + clusterIndicesStats.path("store").path("size_in_bytes").asLong(-1L), + clusterIndicesStats.path("fielddata").path("memory_size_in_bytes").asLong(-1L) + ); + + return ClusterStats.create(clusterName, clusterVersion, nodesStats, indicesStats); + } + + @Override + public JsonNode rawClusterStats() { + final Request request = new Request("GET", "/_cluster/stats/nodes/*"); + return jsonApi.perform(request, "Couldn't read Elasticsearch cluster stats"); + } + + @Override + public Map nodesInfo() { + final Request request = new Request("GET", "/_nodes"); + final JsonNode nodesJson = jsonApi.perform(request, "Couldn't read Elasticsearch nodes data!"); + + return toStream(nodesJson.at("/nodes").fields()) + .collect(Collectors.toMap(Map.Entry::getKey, o -> createNodeInfo(o.getValue()))); + } + + private NodeInfo createNodeInfo(JsonNode nodesJson) { + return NodeInfo.builder() + .version(nodesJson.at("/version").asText()) + .os(nodesJson.at("/os")) + .roles(toStream(nodesJson.at("/roles").elements()).map(JsonNode::asText).toList()) + 
.jvmMemHeapMaxInBytes(nodesJson.at("/jvm/mem/heap_max_in_bytes").asLong()) + .build(); + } + + public Stream toStream(Iterator iterator) { + return StreamSupport.stream(((Iterable) () -> iterator).spliterator(), false); + } + + @Override + public ShardStats shardStats() { + return clusterHealth() + .map(response -> ShardStats.create( + response.getNumberOfNodes(), + response.getNumberOfDataNodes(), + response.getActiveShards(), + response.getRelocatingShards(), + response.getActivePrimaryShards(), + response.getInitializingShards(), + response.getUnassignedShards(), + response.isTimedOut() + )) + .orElseThrow(() -> new ElasticsearchException("Unable to retrieve shard stats.")); + } + + private Optional clusterHealth() { + try { + final ClusterHealthRequest request = new ClusterHealthRequest() + .timeout(TimeValue.timeValueSeconds(Ints.saturatedCast(requestTimeout.toSeconds()))); + return Optional.of(client.execute((c, requestOptions) -> c.cluster().health(request, requestOptions))); + } catch (org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException e) { + if (LOG.isDebugEnabled()) { + LOG.error("{} ({})", e.getMessage(), Optional.ofNullable(e.getCause()).map(Throwable::getMessage).orElse("n/a"), e); + } else { + LOG.error("{} ({})", e.getMessage(), Optional.ofNullable(e.getCause()).map(Throwable::getMessage).orElse("n/a")); + } + return Optional.empty(); + } + } + + @Override + public Optional deflectorHealth(Collection indices) { + if (indices.isEmpty()) { + return Optional.of(HealthStatus.Green); + } + + final Map aliasMapping = catApi.aliases(); + final Set mappedIndices = indices + .stream() + .map(index -> aliasMapping.getOrDefault(index, index)) + .collect(Collectors.toSet()); + + final Set indexSummaries = catApi.indices() + .stream() + .filter(indexSummary -> mappedIndices.contains(indexSummary.index())) + .collect(Collectors.toSet()); + + if (indexSummaries.size() < mappedIndices.size()) { + return Optional.empty(); + } + + return indexSummaries.stream() + .map(IndexSummaryResponse::health) + .map(HealthStatus::fromString) + .min(HealthStatus::compareTo); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ComposableIndexTemplateAdapter.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ComposableIndexTemplateAdapter.java new file mode 100644 index 000000000000..26f925c91651 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ComposableIndexTemplateAdapter.java @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.ComposableIndexTemplateExistRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.DeleteComposableIndexTemplateRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.PutComposableIndexTemplateRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.compress.CompressedXContent; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType; +import org.graylog2.indexer.indices.Template; + +import jakarta.inject.Inject; + +import java.io.IOException; + +public class ComposableIndexTemplateAdapter implements IndexTemplateAdapter { + private final ElasticsearchClient client; + private final ObjectMapper objectMapper; + + @Inject + public ComposableIndexTemplateAdapter(ElasticsearchClient client, ObjectMapper objectMapper) { + this.client = client; + this.objectMapper = objectMapper; + } + + @Override + public boolean ensureIndexTemplate(String templateName, Template template) { + var serializedMapping = serialize(template.mappings()); + var settings = Settings.builder().loadFromSource(serializeJson(template.settings()), XContentType.JSON).build(); + var esTemplate = new org.graylog.shaded.elasticsearch7.org.elasticsearch.cluster.metadata.Template(settings, serializedMapping, null); + var indexTemplate = new ComposableIndexTemplate(template.indexPatterns(), esTemplate, null, template.order(), null, null); + var request = new PutComposableIndexTemplateRequest() + .name(templateName) + .indexTemplate(indexTemplate); + + final AcknowledgedResponse result = client.execute((c, requestOptions) -> c.indices().putIndexTemplate(request, requestOptions), + "Unable to create index template " + templateName); + + return result.isAcknowledged(); + } + + private String serializeJson(Object obj) { + try { + return objectMapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + private CompressedXContent serialize(Object obj) { + try { + return new CompressedXContent(serializeJson(obj)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public boolean indexTemplateExists(String templateName) { + return client.execute((c, requestOptions) -> c.indices().existsIndexTemplate(new ComposableIndexTemplateExistRequest(templateName), + requestOptions), "Unable to verify index template existence " + templateName); + } + + @Override + public boolean deleteIndexTemplate(String templateName) { + var request = new DeleteComposableIndexTemplateRequest(templateName); + + final AcknowledgedResponse result = client.execute((c, requestOptions) -> c.indices().deleteIndexTemplate(request, requestOptions), + "Unable to delete index template " + templateName); + return result.isAcknowledged(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/CountsAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/CountsAdapterES7.java 
new file mode 100644 index 000000000000..353eb0448493 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/CountsAdapterES7.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog2.indexer.counts.CountsAdapter; + +import jakarta.inject.Inject; + +import java.util.List; + +public class CountsAdapterES7 implements CountsAdapter { + private final ElasticsearchClient client; + + @Inject + public CountsAdapterES7(ElasticsearchClient client) { + this.client = client; + } + + @Override + public long totalCount(List indices) { + final SearchSourceBuilder query = new SearchSourceBuilder() + .query(QueryBuilders.matchAllQuery()) + .size(0) + .trackTotalHits(true); + final SearchRequest searchRequest = new SearchRequest(indices.toArray(new String[0])) + .source(query); + + final SearchResponse result = client.search(searchRequest, "Fetching message count failed for indices "); + + return result.getHits().getTotalHits().value; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/DataStreamAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/DataStreamAdapterES7.java new file mode 100644 index 000000000000..54010604cca3 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/DataStreamAdapterES7.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
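For context on the totalCount() implementation above: Elasticsearch 7 caps hits.total.value at 10,000 unless track_total_hits is enabled, which is why the search sets trackTotalHits(true) and uses size(0) so no document hits are fetched, only the total. A roughly equivalent sketch using the Count API instead (index names and the client variable are placeholders, not part of this patch):

    // Sketch only: count all documents in the given indices via the Count API,
    // reusing the (String[], QueryBuilder) constructor also used elsewhere in this change.
    final CountRequest countRequest =
            new CountRequest(new String[]{"graylog_0", "graylog_1"}, QueryBuilders.matchAllQuery());
    final CountResponse countResponse = client.execute(
            (c, requestOptions) -> c.count(countRequest, requestOptions),
            "Fetching message count failed");
    final long total = countResponse.getCount();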
+ */ +package org.graylog.storage.elasticsearch7; + +import org.graylog2.indexer.datastream.DataStreamAdapter; +import org.graylog2.indexer.datastream.Policy; +import org.graylog2.indexer.indices.Template; +import jakarta.annotation.Nonnull; + +public class DataStreamAdapterES7 implements DataStreamAdapter { + private static final String ERROR_MESSAGE = "Data streams are not supported in Elasticsearch"; + + @Override + public boolean ensureDataStreamTemplate(@Nonnull String templateName, @Nonnull Template template, @Nonnull String timestampField) { + throw new UnsupportedOperationException(ERROR_MESSAGE); + } + + @Override + public void createDataStream(String dataStreamName) { + throw new UnsupportedOperationException(ERROR_MESSAGE); + } + + @Override + public void applyIsmPolicy(@Nonnull String dataStreamName, @Nonnull Policy policy) { + throw new UnsupportedOperationException(ERROR_MESSAGE); + } + + @Override + public void setNumberOfReplicas(@Nonnull String dataStreamName, @Nonnull int replicas) { + throw new UnsupportedOperationException(ERROR_MESSAGE); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ES7ResultMessageFactory.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ES7ResultMessageFactory.java new file mode 100644 index 000000000000..35602088412c --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ES7ResultMessageFactory.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * .
+ */ +package org.graylog.storage.elasticsearch7; + +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.text.Text; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import org.graylog2.indexer.results.ResultMessage; +import org.graylog2.indexer.results.ResultMessageFactory; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class ES7ResultMessageFactory { + + private final ResultMessageFactory messageFactory; + + @Inject + public ES7ResultMessageFactory(ResultMessageFactory messageFactory) { + this.messageFactory = messageFactory; + } + + public ResultMessage fromSearchHit(SearchHit hit) { + final Map> highlights = hit.getHighlightFields().entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, ES7ResultMessageFactory::highlightsFromFragments)); + return messageFactory.parseFromSource(hit.getId(), hit.getIndex(), hit.getSourceAsMap(), highlights); + } + + private static List highlightsFromFragments(Map.Entry entry) { + return Arrays.stream(entry.getValue().fragments()) + .map(Text::toString) + .collect(Collectors.toList()); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Metadata.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Metadata.java new file mode 100644 index 000000000000..6acd9347cde5 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Metadata.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import org.graylog2.plugin.PluginMetaData; +import org.graylog2.plugin.ServerStatus; +import org.graylog2.plugin.Version; + +import java.net.URI; +import java.util.Collections; +import java.util.Set; + +public class Elasticsearch7Metadata implements PluginMetaData { + @Override + public String getUniqueId() { + return Elasticsearch7Plugin.class.getCanonicalName(); + } + + @Override + public String getName() { + return "Elasticsearch 7 Support"; + } + + @Override + public String getAuthor() { + return "Graylog, Inc."; + } + + @Override + public URI getURL() { + return URI.create("https://www.graylog.org"); + } + + @Override + public Version getVersion() { + return Version.CURRENT_CLASSPATH; + } + + @Override + public String getDescription() { + return "Support for Elasticsearch 7"; + } + + @Override + public Version getRequiredVersion() { + return Version.CURRENT_CLASSPATH; + } + + @Override + public Set getRequiredCapabilities() { + return Collections.emptySet(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Module.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Module.java new file mode 100644 index 000000000000..16afb8073a25 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Module.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.inject.assistedinject.FactoryModuleBuilder; +import com.google.inject.binder.LinkedBindingBuilder; +import org.graylog.events.search.MoreSearchAdapter; +import org.graylog.plugins.views.migrations.V20200730000000_AddGl2MessageIdFieldAliasForEvents; +import org.graylog.plugins.views.search.engine.QuerySuggestionsService; +import org.graylog.shaded.elasticsearch7.org.apache.http.client.CredentialsProvider; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestHighLevelClient; +import org.graylog.storage.elasticsearch7.client.ESCredentialsProvider; +import org.graylog.storage.elasticsearch7.fieldtypes.streams.StreamsForFieldRetrieverES7; +import org.graylog.storage.elasticsearch7.migrations.V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7; +import org.graylog.storage.elasticsearch7.views.migrations.V20200730000000_AddGl2MessageIdFieldAliasForEventsES7; +import org.graylog2.indexer.IndexToolsAdapter; +import org.graylog2.indexer.cluster.ClusterAdapter; +import org.graylog2.indexer.cluster.NodeAdapter; +import org.graylog2.indexer.counts.CountsAdapter; +import org.graylog2.indexer.datanode.ProxyRequestAdapter; +import org.graylog2.indexer.datanode.RemoteReindexingMigrationAdapter; +import org.graylog2.indexer.datastream.DataStreamAdapter; +import org.graylog2.indexer.fieldtypes.IndexFieldTypePollerAdapter; +import org.graylog2.indexer.fieldtypes.streamfiltered.esadapters.StreamsForFieldRetriever; +import org.graylog2.indexer.indices.IndicesAdapter; +import org.graylog2.indexer.messages.MessagesAdapter; +import org.graylog2.indexer.results.MultiChunkResultRetriever; +import org.graylog2.indexer.searches.SearchesAdapter; +import org.graylog2.migrations.V20170607164210_MigrateReopenedIndicesToAliases; +import org.graylog2.plugin.VersionAwareModule; +import org.graylog2.storage.SearchVersion; + +public class Elasticsearch7Module extends VersionAwareModule { + private final SearchVersion supportedVersion; + private final boolean useComposableIndexTemplates; + + public Elasticsearch7Module(final SearchVersion supportedVersion, boolean useComposableIndexTemplates) { + this.supportedVersion = supportedVersion; + this.useComposableIndexTemplates = useComposableIndexTemplates; + } + + @Override + protected void configure() { + bindForSupportedVersion(StreamsForFieldRetriever.class).to(StreamsForFieldRetrieverES7.class); + bindForSupportedVersion(CountsAdapter.class).to(CountsAdapterES7.class); + bindForSupportedVersion(ClusterAdapter.class).to(ClusterAdapterES7.class); + bindForSupportedVersion(IndicesAdapter.class).to(IndicesAdapterES7.class); + bindForSupportedVersion(DataStreamAdapter.class).to(DataStreamAdapterES7.class); + if (useComposableIndexTemplates) { + bind(IndexTemplateAdapter.class).to(ComposableIndexTemplateAdapter.class); + } else { + bind(IndexTemplateAdapter.class).to(LegacyIndexTemplateAdapter.class); + } + + bindForSupportedVersion(IndexFieldTypePollerAdapter.class).to(IndexFieldTypePollerAdapterES7.class); + bindForSupportedVersion(IndexToolsAdapter.class).to(IndexToolsAdapterES7.class); + bindForSupportedVersion(MessagesAdapter.class).to(MessagesAdapterES7.class); + bindForSupportedVersion(MultiChunkResultRetriever.class).to(PaginationES7.class); + bindForSupportedVersion(MoreSearchAdapter.class).to(MoreSearchAdapterES7.class); + bindForSupportedVersion(NodeAdapter.class).to(NodeAdapterES7.class); + bindForSupportedVersion(SearchesAdapter.class).to(SearchesAdapterES7.class); + 
bindForSupportedVersion(V20170607164210_MigrateReopenedIndicesToAliases.ClusterState.class) + .to(V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7.class); + bindForSupportedVersion(V20200730000000_AddGl2MessageIdFieldAliasForEvents.ElasticsearchAdapter.class) + .to(V20200730000000_AddGl2MessageIdFieldAliasForEventsES7.class); + + bindForSupportedVersion(QuerySuggestionsService.class).to(QuerySuggestionsES7.class); + bindForSupportedVersion(ProxyRequestAdapter.class).to(ProxyRequestAdapterES7.class); + + install(new FactoryModuleBuilder().build(ScrollResultES7.Factory.class)); + + bind(RestHighLevelClient.class).toProvider(RestHighLevelClientProvider.class); + bind(CredentialsProvider.class).toProvider(ESCredentialsProvider.class); + + bindForSupportedVersion(RemoteReindexingMigrationAdapter.class).to(UnsupportedRemoteReindexingMigrationAdapterES7.class); + } + + private LinkedBindingBuilder bindForSupportedVersion(Class interfaceClass) { + return bindForVersion(supportedVersion, interfaceClass); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Plugin.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Plugin.java new file mode 100644 index 000000000000..4f295d194e89 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Elasticsearch7Plugin.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
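A brief, illustrative note on the module wiring above (not part of the patch): the boolean constructor argument controls only which IndexTemplateAdapter implementation is bound, while every other binding is registered for the supported search version passed in. A hedged construction sketch, with the flag value hard-coded for illustration instead of coming from the feature flags:

    // Sketch: ES7 bindings with composable index templates enabled.
    final com.google.inject.Module es7Bindings =
            new Elasticsearch7Module(Elasticsearch7Plugin.SUPPORTED_ES_VERSION, true);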
+ */ +package org.graylog.storage.elasticsearch7; + +import com.github.zafarkhaja.semver.Version; +import com.google.common.collect.ImmutableSet; +import jakarta.inject.Inject; +import org.graylog2.featureflag.FeatureFlags; +import org.graylog2.plugin.Plugin; +import org.graylog2.plugin.PluginMetaData; +import org.graylog2.plugin.PluginModule; +import org.graylog2.storage.SearchVersion; + +import java.util.Collection; + +import static org.graylog2.indexer.Constants.COMPOSABLE_INDEX_TEMPLATES_FEATURE; + +public class Elasticsearch7Plugin implements Plugin { + public static final SearchVersion SUPPORTED_ES_VERSION = SearchVersion.elasticsearch(7, 0, 0); + public static final SearchVersion SUPPORTED_OPENSEARCH_VERSION = SearchVersion.create(SearchVersion.Distribution.OPENSEARCH, Version.of(1, 0, 0)); + + @Inject + private FeatureFlags featureFlags; + + @Override + public PluginMetaData metadata() { + return new Elasticsearch7Metadata(); + } + + @Override + public Collection modules() { + var useComposableIndexTemplates = featureFlags.isOn(COMPOSABLE_INDEX_TEMPLATES_FEATURE); + return ImmutableSet.of( + new Elasticsearch7Module(SUPPORTED_ES_VERSION, useComposableIndexTemplates), + new ViewsESBackendModule(SUPPORTED_ES_VERSION), + new Elasticsearch7Module(SUPPORTED_OPENSEARCH_VERSION, useComposableIndexTemplates), + new ViewsESBackendModule(SUPPORTED_OPENSEARCH_VERSION) + ); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ElasticsearchClient.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ElasticsearchClient.java new file mode 100644 index 000000000000..885ece0cd2eb --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ElasticsearchClient.java @@ -0,0 +1,293 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.joschi.jadconfig.util.Duration; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Streams; +import io.opentelemetry.instrumentation.annotations.WithSpan; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.graylog.shaded.elasticsearch7.org.apache.http.ContentTooLongException; +import org.graylog.shaded.elasticsearch7.org.apache.http.client.config.RequestConfig; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchStatusException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.MultiSearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.MultiSearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.PlainActionFuture; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RequestOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Response; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.ResponseException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestHighLevelClient; +import org.graylog.storage.errors.ResponseError; +import org.graylog2.indexer.BatchSizeTooLargeException; +import org.graylog2.indexer.IndexNotFoundException; +import org.graylog2.indexer.InvalidWriteTargetException; +import org.graylog2.indexer.MapperParsingException; +import org.graylog2.indexer.MasterNotDiscoveredException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static com.google.common.base.Preconditions.checkArgument; + +public class ElasticsearchClient { + private static final Pattern invalidWriteTarget = Pattern.compile("no write index is defined for alias \\[(?[\\w_]+)\\]"); + + private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchClient.class); + + private final RestHighLevelClient client; + private final boolean compressionEnabled; + private final Optional indexerMaxConcurrentSearches; + private final Optional indexerMaxConcurrentShardRequests; + private final ObjectMapper objectMapper; + + @Inject + public ElasticsearchClient(RestHighLevelClient client, + @Named("elasticsearch_compression_enabled") boolean compressionEnabled, + @Named("indexer_max_concurrent_searches") @Nullable Integer indexerMaxConcurrentSearches, + @Named("indexer_max_concurrent_shard_requests") @Nullable Integer indexerMaxConcurrentShardRequests, + ObjectMapper objectMapper) { + this.client = client; + this.compressionEnabled = compressionEnabled; + this.indexerMaxConcurrentSearches = Optional.ofNullable(indexerMaxConcurrentSearches); + this.indexerMaxConcurrentShardRequests = Optional.ofNullable(indexerMaxConcurrentShardRequests); + this.objectMapper = objectMapper; + } + + @VisibleForTesting + public 
ElasticsearchClient(RestHighLevelClient client, ObjectMapper objectMapper) { + this(client, false, null, null, objectMapper); + } + + public SearchResponse search(SearchRequest searchRequest, String errorMessage) { + final MultiSearchRequest multiSearchRequest = new MultiSearchRequest() + .add(searchRequest); + + final MultiSearchResponse result = this.execute((c, requestOptions) -> c.msearch(multiSearchRequest, requestOptions), errorMessage); + + return firstResponseFrom(result, errorMessage); + } + + public SearchResponse singleSearch(SearchRequest searchRequest, String errorMessage) { + return execute((c, requestOptions) -> c.search(searchRequest, requestOptions), errorMessage); + } + + public List msearch(List searchRequests, String errorMessage) { + final MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + + indexerMaxConcurrentSearches.ifPresent(multiSearchRequest::maxConcurrentSearchRequests); + indexerMaxConcurrentShardRequests.ifPresent(maxShardRequests -> searchRequests + .forEach(request -> request.setMaxConcurrentShardRequests(maxShardRequests))); + + searchRequests.forEach(multiSearchRequest::add); + + final MultiSearchResponse result = this.execute((c, requestOptions) -> c.msearch(multiSearchRequest, requestOptions), errorMessage); + + return Streams.stream(result) + .collect(Collectors.toList()); + } + + private SearchResponse firstResponseFrom(MultiSearchResponse result, String errorMessage) { + checkArgument(result != null); + checkArgument(result.getResponses().length == 1); + + final MultiSearchResponse.Item firstResponse = result.getResponses()[0]; + if (firstResponse.getResponse() == null) { + throw exceptionFrom(firstResponse.getFailure(), errorMessage); + } + + return firstResponse.getResponse(); + } + + public PlainActionFuture cancellableMsearch(final List searchRequests) { + var multiSearchRequest = new MultiSearchRequest(); + + indexerMaxConcurrentSearches.ifPresent(multiSearchRequest::maxConcurrentSearchRequests); + indexerMaxConcurrentShardRequests.ifPresent(maxShardRequests -> searchRequests + .forEach(request -> request.setMaxConcurrentShardRequests(maxShardRequests))); + + searchRequests.forEach(multiSearchRequest::add); + + final PlainActionFuture future = new PlainActionFuture<>(); + client.msearchAsync(multiSearchRequest, requestOptions(), future); + + return future; + } + + public R execute(ThrowingBiFunction fn) { + return execute(fn, "An error occurred: "); + } + + @WithSpan + public R execute(ThrowingBiFunction fn, String errorMessage) { + try { + return fn.apply(client, requestOptions()); + } catch (Exception e) { + throw exceptionFrom(e, errorMessage); + } + } + + @WithSpan + public R executeWithIOException(ThrowingBiFunction fn, String errorMessage) throws IOException { + try { + return fn.apply(client, requestOptions()); + } catch (IOException e) { + if (e.getCause() instanceof ContentTooLongException) { + throw new BatchSizeTooLargeException(e.getMessage()); + } + throw e; + } catch (Exception e) { + throw exceptionFrom(e, errorMessage); + } + } + + public JsonNode executeRequest(final Request request, final String errorMessage) { + return execute((c, requestOptions) -> { + final Response response = c.getLowLevelClient().performRequest(request); + return objectMapper.readTree(response.getEntity().getContent()); + }, errorMessage); + } + + private RequestOptions requestOptions() { + return compressionEnabled + ? 
RequestOptions.DEFAULT.toBuilder() + .addHeader("Accept-Encoding", "gzip") + .addHeader("Content-type", "application/json") + .build() + : RequestOptions.DEFAULT; + } + + private ElasticsearchException exceptionFrom(Exception e, String errorMessage) { + if (e instanceof ElasticsearchException elasticsearchException) { + if (isIndexNotFoundException(elasticsearchException)) { + throw IndexNotFoundException.create(errorMessage + elasticsearchException.getResourceId(), elasticsearchException.getIndex().getName()); + } + if (isMasterNotDiscoveredException(elasticsearchException)) { + throw new MasterNotDiscoveredException(); + } + if (isInvalidWriteTargetException(elasticsearchException)) { + final Matcher matcher = invalidWriteTarget.matcher(elasticsearchException.getMessage()); + if (matcher.find()) { + final String target = matcher.group("target"); + throw InvalidWriteTargetException.create(target); + } + } + if (isBatchSizeTooLargeException(elasticsearchException)) { + throw new BatchSizeTooLargeException(elasticsearchException.getMessage()); + } + if (isMapperParsingExceptionException(elasticsearchException)) { + throw new MapperParsingException(elasticsearchException.getMessage()); + } + } else if (e instanceof IOException && e.getCause() instanceof ContentTooLongException) { + throw new BatchSizeTooLargeException(e.getMessage()); + } + return new ElasticsearchException(errorMessage, e); + } + + private boolean isInvalidWriteTargetException(ElasticsearchException elasticsearchException) { + try { + final ParsedElasticsearchException parsedException = ParsedElasticsearchException.from(elasticsearchException.getMessage()); + return parsedException.reason().startsWith("no write index is defined for alias"); + } catch (Exception e) { + return false; + } + } + + private boolean isMasterNotDiscoveredException(ElasticsearchException elasticsearchException) { + try { + final ParsedElasticsearchException parsedException = ParsedElasticsearchException.from(elasticsearchException.getMessage()); + return parsedException.type().equals("master_not_discovered_exception") + || (parsedException.type().equals("cluster_block_exception") && parsedException.reason().contains("no master")); + } catch (Exception e) { + return false; + } + } + + private boolean isIndexNotFoundException(ElasticsearchException elasticsearchException) { + return elasticsearchException.getMessage().contains("index_not_found_exception"); + } + + private boolean isMapperParsingExceptionException(ElasticsearchException openSearchException) { + return openSearchException.getMessage().contains("mapper_parsing_exception"); + } + + private boolean isBatchSizeTooLargeException(ElasticsearchException elasticsearchException) { + if (elasticsearchException instanceof ElasticsearchStatusException statusException) { + if (statusException.getCause() instanceof ResponseException responseException) { + return (responseException.getResponse().getStatusLine().getStatusCode() == 429); + } + } + + try { + final ParsedElasticsearchException parsedException = ParsedElasticsearchException.from(elasticsearchException.getMessage()); + if (parsedException.type().equals("search_phase_execution_exception")) { + ParsedElasticsearchException parsedCause = ParsedElasticsearchException.from(elasticsearchException.getRootCause().getMessage()); + return parsedCause.reason().contains("Batch size is too large"); + } + } catch (Exception e) { + return false; + } + return false; + } + + public static RequestOptions withTimeout(RequestOptions requestOptions, Duration 
timeout) { + final RequestConfig.Builder requestConfigBuilder = (requestOptions == null || requestOptions.getRequestConfig() == null) + ? RequestConfig.custom() + : RequestConfig.copy(requestOptions.getRequestConfig()); + final RequestConfig requestConfigWithTimeout = requestConfigBuilder + .setSocketTimeout(Math.toIntExact(timeout.toMilliseconds())) + .build(); + final RequestOptions.Builder requestOptionsBuilder = requestOptions == null + ? RequestOptions.DEFAULT.toBuilder() + : requestOptions.toBuilder(); + return requestOptionsBuilder + .setRequestConfig(requestConfigWithTimeout) + .build(); + + } + + public Optional parseResponseException(ElasticsearchException ex) { + if (ex.getCause() != null) { + final Throwable[] suppressed = ex.getCause().getSuppressed(); + if (suppressed.length > 0) { + final Throwable realCause = suppressed[0]; + if (realCause instanceof ResponseException) { + try { + final ResponseError err = objectMapper.readValue(((ResponseException) realCause).getResponse().getEntity().getContent(), ResponseError.class); + return Optional.of(err); + } catch (IOException ioe) { + LOG.warn("Failed to parse exception", ioe); + } + } + } + } + return Optional.empty(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ElasticsearchFilterDeprecationWarningsInterceptor.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ElasticsearchFilterDeprecationWarningsInterceptor.java new file mode 100644 index 000000000000..5a0799ac3fc7 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ElasticsearchFilterDeprecationWarningsInterceptor.java @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
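To make the intended call pattern of execute() and withTimeout() concrete, here is a hedged usage sketch (the timeout value and error message are placeholders; the health call mirrors how the cluster adapter earlier in this change set issues requests):

    // Sketch: run a cluster-health request with a per-request socket timeout and let
    // execute() translate client errors into Graylog's ElasticsearchException types.
    final ClusterHealthRequest healthRequest = new ClusterHealthRequest();
    final ClusterHealthResponse health = client.execute(
            (c, requestOptions) -> c.cluster().health(healthRequest,
                    ElasticsearchClient.withTimeout(requestOptions, Duration.seconds(10))),
            "Couldn't read Elasticsearch cluster health");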
+ */ +package org.graylog.storage.elasticsearch7; + +import org.graylog.shaded.elasticsearch7.org.apache.http.Header; +import org.graylog.shaded.elasticsearch7.org.apache.http.HttpException; +import org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse; +import org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponseInterceptor; +import org.graylog.shaded.elasticsearch7.org.apache.http.protocol.HttpContext; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +public class ElasticsearchFilterDeprecationWarningsInterceptor implements HttpResponseInterceptor { + private String[] messagesToFilter = { + "setting was deprecated in Elasticsearch", + "but in a future major version, direct access to system indices and their aliases will not be allowed", + "but in a future major version, directaccess to system indices and their aliases will not be allowed", + "but in a future major version, direct access to system indices will be prevented by default", + "in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch", + "parameter is deprecated because frozen indices have been deprecated.", + org.graylog.shaded.elasticsearch7.org.elasticsearch.common.joda.JodaDeprecationPatterns.USE_NEW_FORMAT_SPECIFIERS + }; + + private boolean isDeprecationMessage(final String message) { + for(String msg: messagesToFilter) { + if(message.contains(msg)) return true; + } + return false; + } + + @Override + public void process(HttpResponse response, HttpContext context) throws + HttpException, IOException { + List
warnings = Arrays.stream(response.getHeaders("Warning")).filter(header -> !this.isDeprecationMessage(header.getValue())).collect(Collectors.toList()); + response.removeHeaders("Warning"); + warnings.stream().forEach(header -> response.addHeader(header)); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/FilteredElasticsearchNodesSniffer.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/FilteredElasticsearchNodesSniffer.java new file mode 100644 index 000000000000..9a633d257d53 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/FilteredElasticsearchNodesSniffer.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Strings; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Node; + +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +class FilteredElasticsearchNodesSniffer implements NodesSniffer { + private final String attribute; + private final String value; + + static FilteredElasticsearchNodesSniffer create(String filter) { + final String attribute; + final String value; + if (!Strings.isNullOrEmpty(filter)) { + final String[] conditions = filter.split(":"); + if (conditions.length < 2) { + throw new IllegalArgumentException("Invalid filter specified for ES node discovery: " + filter); + } + attribute = conditions[0].trim(); + value = conditions[1].trim(); + } else { + attribute = null; + value = null; + } + + return new FilteredElasticsearchNodesSniffer(attribute, value); + } + + @VisibleForTesting + FilteredElasticsearchNodesSniffer(String attribute, String value) { + this.attribute = attribute; + this.value = value; + } + + @Override + public List sniff(List nodes) { + if (attribute == null || value == null) { + return nodes; + } + + return nodes.stream() + .filter(node -> nodeMatchesFilter(node, attribute, value)) + .collect(Collectors.toList()); + } + + private boolean nodeMatchesFilter(Node node, String attribute, String value) { + return node.getAttributes() + .getOrDefault(attribute, Collections.emptyList()) + .contains(value); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexFieldTypePollerAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexFieldTypePollerAdapterES7.java new file mode 100644 index 000000000000..7c1ef577bf07 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexFieldTypePollerAdapterES7.java @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
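A short usage sketch for the node sniffer filter above (the attribute and value are invented examples, not part of the patch): the filter string has the form attribute:value, and only discovered nodes whose attribute list contains that value survive sniffing.

    // Sketch: during node discovery, keep only nodes tagged with rack=west.
    final FilteredElasticsearchNodesSniffer nodesSniffer =
            FilteredElasticsearchNodesSniffer.create("rack:west");
    final List<Node> usableNodes = nodesSniffer.sniff(allDiscoveredNodes);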
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.codahale.metrics.Timer; +import com.google.common.collect.Iterables; +import org.graylog.storage.elasticsearch7.mapping.FieldMappingApi; +import org.graylog2.Configuration; +import org.graylog2.indexer.IndexNotFoundException; +import org.graylog2.indexer.fieldtypes.FieldTypeDTO; +import org.graylog2.indexer.fieldtypes.IndexFieldTypePollerAdapter; +import org.graylog2.indexer.fieldtypes.streamfiltered.esadapters.StreamsForFieldRetriever; +import org.graylog2.shared.utilities.ExceptionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; + +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +public class IndexFieldTypePollerAdapterES7 implements IndexFieldTypePollerAdapter { + private static final Logger LOG = LoggerFactory.getLogger(IndexFieldTypePollerAdapterES7.class); + private final FieldMappingApi fieldMappingApi; + private final boolean streamAwareFieldTypes; + private final StreamsForFieldRetriever streamsForFieldRetriever; + + @Inject + public IndexFieldTypePollerAdapterES7(final FieldMappingApi fieldMappingApi, + final Configuration configuration, + final StreamsForFieldRetriever streamsForFieldRetriever) { + this.fieldMappingApi = fieldMappingApi; + this.streamAwareFieldTypes = configuration.maintainsStreamAwareFieldTypes(); + this.streamsForFieldRetriever = streamsForFieldRetriever; + } + + @Override + public Optional> pollIndex(String indexName, Timer pollTimer) { + final Map fieldTypes; + try (final Timer.Context ignored = pollTimer.time()) { + fieldTypes = fieldMappingApi.fieldTypes(indexName); + } catch (IndexNotFoundException e) { + if (LOG.isDebugEnabled()) { + LOG.error("Couldn't get mapping for index <{}>", indexName, e); + } else { + LOG.error("Couldn't get mapping for index <{}>: {}", indexName, ExceptionUtils.getRootCauseMessage(e)); + } + return Optional.empty(); + } + + final Map filteredFieldTypes = fieldTypes.entrySet() + .stream() + // The "type" value is empty if we deal with a nested data type + // TODO: Figure out how to handle nested fields, for now we only support the top-level fields + .filter(field -> !field.getValue().type().isEmpty()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + if (!streamAwareFieldTypes) { + return Optional.of(filteredFieldTypes.entrySet() + .stream() + .map(field -> fromFieldNameAndMapping(field.getKey(), field.getValue()) + .streams(Set.of()) + .build()) + .collect(Collectors.toSet())); + } else { + Set result = new HashSet<>(); + final Iterable>> partitioned = Iterables.partition(filteredFieldTypes.entrySet(), MAX_SEARCHES_PER_MULTI_SEARCH); + for (var batch : partitioned) { + final Map> streams = streamsForFieldRetriever.getStreams(batch.stream().map(Map.Entry::getKey).collect(Collectors.toList()), indexName); + batch.stream() + .map(entry -> fromFieldNameAndMapping(entry.getKey(), entry.getValue()) + .streams(streams.get(entry.getKey())) + .build() + ) + .forEach(result::add); + + } + 
return Optional.of(result); + } + + } + + private FieldTypeDTO.Builder fromFieldNameAndMapping(final String fieldName, final FieldMappingApi.FieldMapping mapping) { + final Boolean fieldData = mapping.fielddata().orElse(false); + return FieldTypeDTO.builder() + .fieldName(fieldName) + .physicalType(mapping.type()) + .properties(fieldData ? + Collections.singleton(FieldTypeDTO.Properties.FIELDDATA) + : Set.of()); + } + + @Override + public boolean maintainsStreamBasedFieldLists() { + return streamAwareFieldTypes; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexTemplateAdapter.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexTemplateAdapter.java new file mode 100644 index 000000000000..05cdfb63d75f --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexTemplateAdapter.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import org.graylog2.indexer.indices.Template; + +interface IndexTemplateAdapter { + boolean ensureIndexTemplate(String templateName, Template template); + + boolean indexTemplateExists(String templateName); + + boolean deleteIndexTemplate(String templateName); +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexToolsAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexToolsAdapterES7.java new file mode 100644 index 000000000000..f39e59e77f27 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndexToolsAdapterES7.java @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
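The IndexTemplateAdapter interface above is deliberately minimal; a hedged usage sketch of the idempotent install-and-check pattern callers can follow (the template name and the template variable are placeholders):

    // Sketch: only (re)install the index template when it is missing.
    if (!indexTemplateAdapter.indexTemplateExists("graylog-internal")) {
        indexTemplateAdapter.ensureIndexTemplate("graylog-internal", template);
    }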
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.core.CountRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.core.CountResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog2.indexer.IndexToolsAdapter; +import org.graylog2.plugin.Message; +import org.graylog2.plugin.streams.Stream; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import jakarta.inject.Inject; + +import java.time.ZonedDateTime; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.termsQuery; + +public class IndexToolsAdapterES7 implements IndexToolsAdapter { + private static final String AGG_DATE_HISTOGRAM = "source_date_histogram"; + private static final String AGG_MESSAGE_FIELD = "message_field"; + private static final String AGG_FILTER = "message_filter"; + private final ElasticsearchClient client; + + @Inject + public IndexToolsAdapterES7(ElasticsearchClient client) { + this.client = client; + } + + @Override + public Map> fieldHistogram(String fieldName, Set indices, Optional> includedStreams, long interval) { + final BoolQueryBuilder queryBuilder = buildStreamIdFilter(includedStreams); + + final FilterAggregationBuilder the_filter = AggregationBuilders.filter(AGG_FILTER, queryBuilder) + .subAggregation(AggregationBuilders.dateHistogram(AGG_DATE_HISTOGRAM) + .field("timestamp") + .subAggregation(AggregationBuilders.terms(AGG_MESSAGE_FIELD).field(fieldName)) + .fixedInterval(new DateHistogramInterval(interval + "ms")) + // We use "min_doc_count" here to avoid empty buckets in the histogram result. + // This is needed to avoid out-of-memory errors when creating a histogram for a really large + // date range. 
See: https://github.com/Graylog2/graylog-plugin-archive/issues/59 + .minDocCount(1L)); + + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(QueryBuilders.matchAllQuery()) + .aggregation(the_filter); + + final SearchRequest searchRequest = new SearchRequest() + .source(searchSourceBuilder) + .indices(indices.toArray(new String[0])); + final SearchResponse searchResult = client.search(searchRequest, "Unable to retrieve field histogram."); + + final Filter filterAggregation = searchResult.getAggregations().get(AGG_FILTER); + final ParsedDateHistogram dateHistogram = filterAggregation.getAggregations().get(AGG_DATE_HISTOGRAM); + + + final List histogramBuckets = (List) dateHistogram.getBuckets(); + final Map> result = Maps.newHashMapWithExpectedSize(histogramBuckets.size()); + + for (ParsedDateHistogram.ParsedBucket bucket : histogramBuckets) { + final ZonedDateTime zonedDateTime = (ZonedDateTime) bucket.getKey(); + final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli(), DateTimeZone.UTC); + + final Terms sourceFieldAgg = bucket.getAggregations().get(AGG_MESSAGE_FIELD); + final List termBuckets = sourceFieldAgg.getBuckets(); + + final HashMap termCounts = Maps.newHashMapWithExpectedSize(termBuckets.size()); + + for (Terms.Bucket termBucket : termBuckets) { + termCounts.put(termBucket.getKeyAsString(), termBucket.getDocCount()); + } + + result.put(date, termCounts); + } + + return ImmutableMap.copyOf(result); + } + + @Override + public long count(Set indices, Optional> includedStreams) { + final CountRequest request = new CountRequest(indices.toArray(new String[0]), buildStreamIdFilter(includedStreams)) + .indicesOptions(IndicesOptions.fromOptions(true, false, true, false)); + + final CountResponse result = client.execute((c, requestOptions) -> c.count(request, requestOptions), "Unable to count documents of index."); + + return result.getCount(); + } + + private BoolQueryBuilder buildStreamIdFilter(Optional> includedStreams) { + final BoolQueryBuilder queryBuilder = boolQuery().must(matchAllQuery()); + + // If the included streams are not present, we do not filter on streams + if (includedStreams.isPresent()) { + final Set streams = includedStreams.get(); + final BoolQueryBuilder filterBuilder = boolQuery(); + + // If the included streams set contains the default stream, we also want all documents which do not + // have any stream assigned. Those documents have basically been in the "default stream" which didn't + // exist in Graylog <2.2.0. + if (streams.contains(Stream.DEFAULT_STREAM_ID)) { + filterBuilder.should(boolQuery().mustNot(existsQuery(Message.FIELD_STREAMS))); + } + + // Only select messages which are assigned to the given streams + filterBuilder.should(termsQuery(Message.FIELD_STREAMS, streams)); + + queryBuilder.filter(filterBuilder); + } + + return queryBuilder; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndicesAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndicesAdapterES7.java new file mode 100644 index 000000000000..7d604e8461db --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/IndicesAdapterES7.java @@ -0,0 +1,647 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.fasterxml.jackson.databind.JsonNode; +import com.github.joschi.jadconfig.util.Duration; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchType; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.GetAliasesResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Requests; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.CloseIndexRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.CreateIndexRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.DeleteAliasRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.GetMappingsRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.PutMappingRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.cluster.metadata.AliasMetadata; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.unit.TimeValue; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.ReindexRequest; +import 
org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.Max; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.Min; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.storage.elasticsearch7.blocks.BlockSettingsParser; +import org.graylog.storage.elasticsearch7.cat.CatApi; +import org.graylog.storage.elasticsearch7.cluster.ClusterStateApi; +import org.graylog.storage.elasticsearch7.stats.ClusterStatsApi; +import org.graylog.storage.elasticsearch7.stats.StatsApi; +import org.graylog2.datatiering.WarmIndexInfo; +import org.graylog2.indexer.IndexNotFoundException; +import org.graylog2.indexer.indices.HealthStatus; +import org.graylog2.indexer.indices.IndexMoveResult; +import org.graylog2.indexer.indices.IndexSettings; +import org.graylog2.indexer.indices.Indices; +import org.graylog2.indexer.indices.IndicesAdapter; +import org.graylog2.indexer.indices.ShardsInfo; +import org.graylog2.indexer.indices.Template; +import org.graylog2.indexer.indices.blocks.IndicesBlockStatus; +import org.graylog2.indexer.indices.stats.IndexStatistics; +import org.graylog2.indexer.searches.IndexRangeStats; +import org.graylog2.plugin.Message; +import org.graylog2.rest.resources.system.indexer.responses.IndexSetStats; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static java.util.stream.Collectors.toList; +import static org.graylog.storage.elasticsearch7.ElasticsearchClient.withTimeout; + +public class IndicesAdapterES7 implements IndicesAdapter { + private static final Logger LOG = LoggerFactory.getLogger(IndicesAdapterES7.class); + private final ElasticsearchClient client; + private final StatsApi statsApi; + private final ClusterStatsApi clusterStatsApi; + private final CatApi catApi; + private final ClusterStateApi clusterStateApi; + private final IndexTemplateAdapter indexTemplateAdapter; + + // this is the maximum amount of bytes that the index list is supposed to fill in a request, + // it assumes that these don't need url encoding. 
If we exceed the maximum, we request settings for all indices + // and filter after wards + private final int MAX_INDICES_URL_LENGTH = 3000; + + @Inject + public IndicesAdapterES7(ElasticsearchClient client, + StatsApi statsApi, + ClusterStatsApi clusterStatsApi, + CatApi catApi, + ClusterStateApi clusterStateApi, + IndexTemplateAdapter indexTemplateAdapter) { + this.client = client; + this.statsApi = statsApi; + this.clusterStatsApi = clusterStatsApi; + this.catApi = catApi; + this.clusterStateApi = clusterStateApi; + this.indexTemplateAdapter = indexTemplateAdapter; + } + + @Override + public void move(String source, String target, Consumer resultCallback) { + final ReindexRequest request = new ReindexRequest(); + request.setSourceIndices(source); + request.setDestIndex(target); + + final BulkByScrollResponse result = client.execute((c, requestOptions) -> c.reindex(request, requestOptions)); + + final IndexMoveResult indexMoveResult = IndexMoveResult.create( + Math.toIntExact(result.getTotal()), + result.getTook().millis(), + !result.getBulkFailures().isEmpty() + ); + resultCallback.accept(indexMoveResult); + } + + @Override + public void delete(String index) { + final DeleteIndexRequest request = new DeleteIndexRequest(index); + + client.execute((c, requestOptions) -> c.indices().delete(request, requestOptions)); + } + + @Override + public Set resolveAlias(String alias) { + final GetAliasesRequest request = new GetAliasesRequest() + .aliases(alias); + final GetAliasesResponse result = client.execute((c, requestOptions) -> c.indices().getAlias(request, requestOptions)); + + return result.getAliases().keySet(); + } + + @Override + public void create(String index, IndexSettings indexSettings) { + executeCreateIndexRequest(index, createIndexRequest(index, indexSettings, null)); + } + + @Override + public void create(String index, IndexSettings indexSettings, @Nullable Map mapping) { + executeCreateIndexRequest(index, createIndexRequest(index, indexSettings, mapping)); + } + + private CreateIndexRequest createIndexRequest(String index, + IndexSettings indexSettings, + @Nullable Map mapping) { + CreateIndexRequest request = new CreateIndexRequest(index).settings(indexSettings.map()); + if (mapping != null) { + request = request.mapping(mapping); + } + return request; + } + + private void executeCreateIndexRequest(String index, CreateIndexRequest request) { + client.execute((c, requestOptions) -> c.indices().create(request, requestOptions), + "Unable to create index " + index); + } + + @Override + public void updateIndexMapping(@Nonnull String indexName, + @Nonnull String mappingType, + @Nonnull Map mapping) { + + final PutMappingRequest request = new PutMappingRequest(indexName) + .source(mapping); + + client.execute((c, requestOptions) -> c.indices().putMapping(request, requestOptions), + "Unable to update index mapping " + indexName); + } + + @Override + public Map getIndexMapping(@Nonnull String index) { + final GetMappingsRequest request = new GetMappingsRequest() + .indices(index) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + + return client.execute((c, requestOptions) -> c.indices().getMapping(request, requestOptions).mappings().get(index).sourceAsMap(), + "Couldn't read mapping of index " + index); + } + + @Override + public Map getFlattenIndexSettings(@Nonnull String index) { + + final GetSettingsRequest getSettingsRequest = new GetSettingsRequest() + .indices(index) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + + return client.execute((c, requestOptions) -> { + 
final GetSettingsResponse settingsResponse = c.indices().getSettings(getSettingsRequest, requestOptions); + Settings settings = settingsResponse.getIndexToSettings().get(index); + ImmutableMap.Builder builder = ImmutableMap.builder(); + settings.keySet().forEach(k -> Optional.ofNullable(settings.get(k)).ifPresent(v -> builder.put(k, v))); + return builder.build(); + }, "Couldn't read settings of index " + index); + } + + @Override + public void updateIndexMetaData(@Nonnull String index, @Nonnull Map metadata, boolean mergeExisting) { + Map metaUpdate = new HashMap<>(); + if (mergeExisting) { + final Map oldMetaData = getIndexMetaData(index); + metaUpdate.putAll(oldMetaData); + } + metaUpdate.putAll(metadata); + updateIndexMapping(index, "ignored", Map.of("_meta", metaUpdate)); + } + + @Override + public Map getIndexMetaData(@Nonnull String index) { + final Object metaData = getIndexMapping(index).get("_meta"); + //noinspection rawtypes + if (metaData instanceof Map map) { + //noinspection unchecked + return map; + } + return Map.of(); + } + + @Override + public boolean ensureIndexTemplate(String templateName, Template template) { + return indexTemplateAdapter.ensureIndexTemplate(templateName, template); + } + + @Override + public boolean indexTemplateExists(String templateName) { + return indexTemplateAdapter.indexTemplateExists(templateName); + } + + @Override + public boolean deleteIndexTemplate(String templateName) { + return indexTemplateAdapter.deleteIndexTemplate(templateName); + } + + @Override + public Optional indexCreationDate(String index) { + final GetSettingsRequest request = new GetSettingsRequest() + .indices(index) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + + final GetSettingsResponse result = client.execute((c, requestOptions) -> c.indices().getSettings(request, requestOptions), + "Couldn't read settings of index " + index); + + final Optional creationDate = Optional.ofNullable(result.getIndexToSettings().get(index)) + .map(indexSettings -> indexSettings.get("index.creation_date")); + + return creationDate + .map(Long::valueOf) + .map(instant -> new DateTime(instant, DateTimeZone.UTC)); + } + + @Override + public Optional indexClosingDate(String index) { + final Map indexMetaData = getIndexMetaData(index); + return Optional.ofNullable(indexMetaData.get("closing_date")).filter(Long.class::isInstance).map(Long.class::cast) + .map(instant -> new DateTime(instant, DateTimeZone.UTC)); + } + + @Override + public void openIndex(String index) { + final OpenIndexRequest request = new OpenIndexRequest(index); + + client.execute((c, requestOptions) -> c.indices().open(request, requestOptions), + "Unable to open index " + index); + } + + @Override + public void setReadOnly(String index) { + // https://www.elastic.co/guide/en/elasticsearch/reference/7.8/indices-update-settings.html + // https://www.elastic.co/guide/en/elasticsearch/reference/7.10/index-modules-blocks.html + final Map settings = ImmutableMap.of( + "index", ImmutableMap.of("blocks", + ImmutableMap.of( + "write", true, // Block writing. + "read", false, // Allow reading. + "metadata", false) // Allow getting metadata. 
+ ) + ); + + final UpdateSettingsRequest request = new UpdateSettingsRequest(index) + .settings(settings); + client.execute((c, requestOptions) -> c.indices().putSettings(request, requestOptions), + "Couldn't set index " + index + " to read-only"); + } + + @Override + public void flush(String index) { + final FlushRequest request = new FlushRequest(index); + + client.execute((c, requestOptions) -> c.indices().flush(request, requestOptions), + "Unable to flush index " + index); + } + + @Override + public void markIndexReopened(String index) { + final String aliasName = index + Indices.REOPENED_ALIAS_SUFFIX; + final IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); + final IndicesAliasesRequest.AliasActions aliasAction = new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD) + .index(index) + .alias(aliasName); + indicesAliasesRequest.addAliasAction(aliasAction); + + client.execute((c, requestOptions) -> c.indices().updateAliases(indicesAliasesRequest, requestOptions), + "Couldn't create reopened alias for index " + index); + } + + @Override + public void removeAlias(String index, String alias) { + final DeleteAliasRequest request = new DeleteAliasRequest(index, alias); + + client.execute((c, requestOptions) -> c.indices().deleteAlias(request, requestOptions), + "Unable to remove alias " + alias + ", pointing to " + index); + } + + @Override + public void close(String index) { + final CloseIndexRequest request = new CloseIndexRequest(index); + + client.execute((c, requestOptions) -> c.indices().close(request, requestOptions), + "Unable to close index " + index); + } + + @Override + public long numberOfMessages(String index) { + final JsonNode result = statsApi.indexStats(index); + final JsonNode count = result.path("_all").path("primaries").path("docs").path("count"); + if (count.isMissingNode()) { + throw new RuntimeException("Unable to extract count from response."); + } + return count.asLong(); + } + + private GetSettingsResponse settingsFor(String indexOrAlias) { + final GetSettingsRequest request = new GetSettingsRequest().indices(indexOrAlias) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED); + return client.execute((c, requestOptions) -> c.indices().getSettings(request, requestOptions), + "Unable to retrieve settings for index/alias " + indexOrAlias); + } + + @Override + public Map> aliases(String indexPattern) { + final GetAliasesRequest request = new GetAliasesRequest() + .indices(indexPattern) + .indicesOptions(IndicesOptions.fromOptions(false, false, true, false)); + final GetAliasesResponse result = client.execute((c, requestOptions) -> c.indices().getAlias(request, requestOptions), + "Couldn't collect aliases for index pattern " + indexPattern); + return result.getAliases() + .entrySet() + .stream() + .collect(Collectors.toMap( + Map.Entry::getKey, + entry -> entry.getValue().stream().map(AliasMetadata::alias).collect(Collectors.toSet()) + )); + } + + @Override + public Map> fieldsInIndices(String[] writeIndexWildcards) { + final List indexWildCards = Arrays.asList(writeIndexWildcards); + return clusterStateApi.fields(indexWildCards); + } + + @Override + public Set closedIndices(Collection indices) { + return catApi.indices(indices, Collections.singleton("close"), + "Unable to retrieve list of closed indices for " + indices); + } + + @Override + public Set indicesStats(Collection indices) { + final ImmutableSet.Builder result = ImmutableSet.builder(); + + final JsonNode allWithShardLevel = 
statsApi.indexStatsWithShardLevel(indices); + final Iterator> fields = allWithShardLevel.fields(); + while (fields.hasNext()) { + final Map.Entry entry = fields.next(); + final String index = entry.getKey(); + final JsonNode indexStats = entry.getValue(); + if (indexStats.isObject()) { + result.add(IndexStatistics.create(index, indexStats)); + } + } + + return result.build(); + } + + @Override + public Optional getIndexStats(String index) { + final JsonNode indexStats = statsApi.indexStatsWithShardLevel(index); + return indexStats.isMissingNode() + ? Optional.empty() + : Optional.of(IndexStatistics.create(index, indexStats)); + } + + @Override + public JsonNode getIndexStats(Collection indices) { + return statsApi.indexStatsWithDocsAndStore(indices); + } + + @Override + public IndexSetStats getIndexSetStats() { + return clusterStatsApi.clusterStats(); + } + + @Override + public List getShardsInfo(String indexName) { + return catApi.getShardsInfo(indexName); + } + + @Override + public IndicesBlockStatus getIndicesBlocksStatus(final List indices) { + if (indices == null || indices.isEmpty()) { + throw new IllegalArgumentException("Expecting list of indices with at least one index present."); + } + + final GetSettingsRequest request = new GetSettingsRequest() + .indicesOptions(IndicesOptions.fromOptions(false, true, true, true)) + .names("index.blocks.read", "index.blocks.write", "index.blocks.metadata", "index.blocks.read_only", "index.blocks.read_only_allow_delete"); + + final var maxLengthExceeded = String.join(",", indices).length() > MAX_INDICES_URL_LENGTH; + final GetSettingsRequest getSettingsRequest = maxLengthExceeded ? request : request.indices(indices.toArray(new String[]{})); + + return client.execute((c, requestOptions) -> { + final GetSettingsResponse settingsResponse = c.indices().getSettings(getSettingsRequest, requestOptions); + return BlockSettingsParser.parseBlockSettings(settingsResponse, maxLengthExceeded ? 
Optional.of(indices) : Optional.empty()); + }); + } + + @Override + public boolean exists(String index) { + final GetSettingsResponse result = settingsFor(index); + return result.getIndexToSettings().size() == 1 && result.getIndexToSettings().containsKey(index); + } + + @Override + public boolean aliasExists(String alias) { + final GetAliasesRequest request = new GetAliasesRequest(alias); + return client.execute((c, requestOptions) -> c.indices().existsAlias(request, requestOptions)); + } + + @Override + public Set indices(String indexWildcard, List status, String indexSetId) { + return catApi.indices(indexWildcard, status, "Couldn't get index list for index set <" + indexSetId + ">"); + } + + @Override + public Optional storeSizeInBytes(String index) { + return statsApi.storeSizes(index); + } + + @Override + public void cycleAlias(String aliasName, String targetIndex) { + final IndicesAliasesRequest.AliasActions addAlias = new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD) + .index(targetIndex) + .alias(aliasName); + final IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest() + .addAliasAction(addAlias); + + client.execute((c, requestOptions) -> c.indices().updateAliases(indicesAliasesRequest, requestOptions), + "Couldn't point alias " + aliasName + " to index " + targetIndex); + } + + @Override + public void cycleAlias(String aliasName, String targetIndex, String oldIndex) { + final IndicesAliasesRequest.AliasActions addAlias = new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD) + .index(targetIndex) + .alias(aliasName); + final IndicesAliasesRequest.AliasActions removeAlias = new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.REMOVE) + .index(oldIndex) + .alias(aliasName); + final IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest() + .addAliasAction(removeAlias) + .addAliasAction(addAlias); + + client.execute((c, requestOptions) -> c.indices().updateAliases(indicesAliasesRequest, requestOptions), + "Couldn't switch alias " + aliasName + " from index " + oldIndex + " to index " + targetIndex); + } + + @Override + public void removeAliases(Set indices, String alias) { + final IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); + final IndicesAliasesRequest.AliasActions aliasAction = IndicesAliasesRequest.AliasActions.remove() + .alias(alias) + .indices(indices.toArray(new String[0])); + indicesAliasesRequest.addAliasAction(aliasAction); + + client.execute((c, requestOptions) -> c.indices().updateAliases(indicesAliasesRequest, requestOptions), + "Couldn't remove alias " + alias + " from indices " + indices); + } + + @Override + public void optimizeIndex(String index, int maxNumSegments, Duration timeout) { + final ForceMergeRequest request = new ForceMergeRequest() + .indices(index) + .maxNumSegments(maxNumSegments) + .flush(true); + + client.execute((c, requestOptions) -> c.indices().forcemerge(request, withTimeout(requestOptions, timeout))); + } + + @Override + public IndexRangeStats indexRangeStatsOfIndex(String index) { + final FilterAggregationBuilder builder = AggregationBuilders.filter("agg", QueryBuilders.existsQuery(Message.FIELD_TIMESTAMP)) + .subAggregation(AggregationBuilders.min("ts_min").field(Message.FIELD_TIMESTAMP)) + .subAggregation(AggregationBuilders.max("ts_max").field(Message.FIELD_TIMESTAMP)) + .subAggregation(AggregationBuilders.terms("streams").size(Integer.MAX_VALUE).field(Message.FIELD_STREAMS)); + final 
SearchSourceBuilder query = SearchSourceBuilder.searchSource() + .aggregation(builder) + .size(0); + + final SearchRequest request = new SearchRequest() + .source(query) + .indices(index) + .searchType(SearchType.DFS_QUERY_THEN_FETCH) + .indicesOptions(IndicesOptions.lenientExpandOpen()); + + final SearchResponse result = client.execute((c, requestOptions) -> c.search(request, requestOptions), + "Couldn't build index range of index " + index); + + if (result.getTotalShards() == 0 || result.getAggregations() == null) { + throw new IndexNotFoundException("Couldn't build index range of index " + index + " because it doesn't exist."); + } + final Filter f = result.getAggregations().get("agg"); + if (f == null) { + throw new IndexNotFoundException("Couldn't build index range of index " + index + " because it doesn't exist."); + } else if (f.getDocCount() == 0L) { + LOG.debug("No documents with attribute \"timestamp\" found in index <{}>", index); + return IndexRangeStats.EMPTY; + } + + final Min minAgg = f.getAggregations().get("ts_min"); + final long minUnixTime = Double.valueOf(minAgg.getValue()).longValue(); + final DateTime min = new DateTime(minUnixTime, DateTimeZone.UTC); + final Max maxAgg = f.getAggregations().get("ts_max"); + final long maxUnixTime = Double.valueOf(maxAgg.getValue()).longValue(); + final DateTime max = new DateTime(maxUnixTime, DateTimeZone.UTC); + // make sure we return an empty list, so we can differentiate between old indices that don't have this information + // and newer ones that simply have no streams. + final Terms streams = f.getAggregations().get("streams"); + final List streamIds = streams.getBuckets().stream() + .map(MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(toList()); + + return IndexRangeStats.create(min, max, streamIds); + } + + @Override + public HealthStatus waitForRecovery(String index) { + return waitForRecovery(index, 30); + } + + @Override + public HealthStatus waitForRecovery(String index, int timeout) { + final ClusterHealthRequest clusterHealthRequest = new ClusterHealthRequest(index).timeout(TimeValue.timeValueSeconds(timeout)); + clusterHealthRequest.waitForGreenStatus(); + + final ClusterHealthResponse result = client.execute((c, requestOptions) -> c.cluster().health(clusterHealthRequest, requestOptions)); + return HealthStatus.fromString(result.getStatus().toString()); + } + + @Override + public boolean isOpen(String index) { + return indexHasState(index, State.Open); + } + + @Override + public boolean isClosed(String index) { + return indexHasState(index, State.Closed); + } + + @Override + public void refresh(String... 
indices) {
+        final RefreshRequest refreshRequest = Requests.refreshRequest(indices);
+        client.execute((c, requestOptions) -> c.indices().refresh(refreshRequest, requestOptions));
+    }
+
+    private Boolean indexHasState(String index, State open) {
+        return indexState(index)
+                .map(state -> state.equals(open))
+                .orElseThrow(() -> new IndexNotFoundException("Unable to determine state for absent index " + index));
+    }
+
+    private Optional<State> indexState(String index) {
+        final Optional<String> result = catApi.indexState(index, "Unable to retrieve index stats for " + index);
+
+        return result.map((State::parse));
+    }
+
+    enum State {
+        Open,
+        Closed;
+
+        static State parse(String state) {
+            switch (state.toLowerCase(Locale.ENGLISH)) {
+                case "open":
+                    return Open;
+                case "close":
+                    return Closed;
+            }
+
+            throw new IllegalStateException("Unable to parse invalid index state: " + state);
+        }
+    }
+
+    @Override
+    public String getIndexId(String index) {
+        final GetSettingsRequest request = new GetSettingsRequest().indices(index)
+                .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED);
+        final GetSettingsResponse response = client.execute((c, requestOptions) -> c.indices().getSettings(request, requestOptions),
+                "Unable to retrieve settings for index/alias " + index);
+        return response.getSetting(index, "index.uuid");
+    }
+
+    // Snapshots not supported for ES
+    @Override
+    public Optional<WarmIndexInfo> getWarmIndexInfo(String index) {
+        return Optional.empty();
+    }
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/LegacyIndexTemplateAdapter.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/LegacyIndexTemplateAdapter.java
new file mode 100644
index 000000000000..30eda66d7276
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/LegacyIndexTemplateAdapter.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */
+package org.graylog.storage.elasticsearch7;
+
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.IndexTemplatesExistRequest;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.PutIndexTemplateRequest;
+import org.graylog2.indexer.indices.Template;
+
+import jakarta.inject.Inject;
+
+import java.util.Map;
+
+public class LegacyIndexTemplateAdapter implements IndexTemplateAdapter {
+    private final ElasticsearchClient client;
+
+    @Inject
+    public LegacyIndexTemplateAdapter(ElasticsearchClient client) {
+        this.client = client;
+    }
+
+    @Override
+    public boolean ensureIndexTemplate(String templateName, Template template) {
+        final Map<String, Object> templateSource = Map.of(
+                "index_patterns", template.indexPatterns(),
+                "mappings", template.mappings(),
+                "settings", template.settings(),
+                "order", template.order()
+        );
+        final PutIndexTemplateRequest request = new PutIndexTemplateRequest(templateName)
+                .source(templateSource);
+
+        final AcknowledgedResponse result = client.execute((c, requestOptions) -> c.indices().putTemplate(request, requestOptions),
+                "Unable to create index template " + templateName);
+
+        return result.isAcknowledged();
+    }
+
+    @Override
+    public boolean indexTemplateExists(String templateName) {
+        return client.execute((c, requestOptions) -> c.indices().existsTemplate(new IndexTemplatesExistRequest(templateName),
+                requestOptions), "Unable to verify index template existence " + templateName);
+    }
+
+    @Override
+    public boolean deleteIndexTemplate(String templateName) {
+        final DeleteIndexTemplateRequest request = new DeleteIndexTemplateRequest(templateName);
+
+        final AcknowledgedResponse result = client.execute((c, requestOptions) -> c.indices().deleteTemplate(request, requestOptions),
+                "Unable to delete index template " + templateName);
+        return result.isAcknowledged();
+    }
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/MessagesAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/MessagesAdapterES7.java
new file mode 100644
index 000000000000..1fe362a73d2d
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/MessagesAdapterES7.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */ +package org.graylog.storage.elasticsearch7; + +import com.codahale.metrics.Meter; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.bulk.BulkItemResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.bulk.BulkRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.bulk.BulkResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.get.GetRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.get.GetResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.AnalyzeRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.AnalyzeResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.rest.RestStatus; +import org.graylog2.indexer.messages.ChunkedBulkIndexer; +import org.graylog2.indexer.messages.DocumentNotFoundException; +import org.graylog2.indexer.messages.Indexable; +import org.graylog2.indexer.messages.IndexingError; +import org.graylog2.indexer.messages.IndexingRequest; +import org.graylog2.indexer.messages.IndexingResult; +import org.graylog2.indexer.messages.IndexingResults; +import org.graylog2.indexer.messages.IndexingSuccess; +import org.graylog2.indexer.messages.Messages; +import org.graylog2.indexer.messages.MessagesAdapter; +import org.graylog2.indexer.messages.SerializationContext; +import org.graylog2.indexer.results.ResultMessage; +import org.graylog2.indexer.results.ResultMessageFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static com.codahale.metrics.MetricRegistry.name; + +public class MessagesAdapterES7 implements MessagesAdapter { + static final String INDEX_BLOCK_ERROR = "cluster_block_exception"; + static final String MAPPER_PARSING_EXCEPTION = "mapper_parsing_exception"; + static final String INDEX_BLOCK_REASON = "blocked by: [TOO_MANY_REQUESTS/12/index read-only / allow delete (api)"; + static final String FLOOD_STAGE_WATERMARK = "blocked by: [TOO_MANY_REQUESTS/12/disk usage exceeded flood-stage watermark"; + static final String UNAVAILABLE_SHARDS_EXCEPTION = "unavailable_shards_exception"; + static final String PRIMARY_SHARD_NOT_ACTIVE_REASON = "primary shard is not active"; + + static final String ILLEGAL_ARGUMENT_EXCEPTION = "illegal_argument_exception"; + static final String NO_WRITE_INDEX_DEFINED_FOR_ALIAS = "no write index is defined for alias"; + + static final String CIRCUIT_BREAKING_EXCEPTION = "circuit_breaking_exception"; + static final String DATA_TOO_LARGE = "Data too large"; + + private final ResultMessageFactory resultMessageFactory; + private final ElasticsearchClient client; + private final Meter invalidTimestampMeter; + private final ChunkedBulkIndexer chunkedBulkIndexer; + private final ObjectMapper objectMapper; + + @Inject + public MessagesAdapterES7(ResultMessageFactory resultMessageFactory, ElasticsearchClient elasticsearchClient, + MetricRegistry metricRegistry, ChunkedBulkIndexer chunkedBulkIndexer, ObjectMapper 
objectMapper) { + this.resultMessageFactory = resultMessageFactory; + this.client = elasticsearchClient; + this.invalidTimestampMeter = metricRegistry.meter(name(Messages.class, "invalid-timestamps")); + this.chunkedBulkIndexer = chunkedBulkIndexer; + this.objectMapper = objectMapper; + } + + @Override + public ResultMessage get(String messageId, String index) throws DocumentNotFoundException { + final GetRequest getRequest = new GetRequest(index, messageId); + + final GetResponse result = this.client.execute((c, requestOptions) -> c.get(getRequest, requestOptions)); + + if (!result.isExists()) { + throw new DocumentNotFoundException(index, messageId); + } + + return resultMessageFactory.parseFromSource(messageId, index, result.getSource()); + } + + @Override + public List analyze(String toAnalyze, String index, String analyzer) { + final AnalyzeRequest analyzeRequest = AnalyzeRequest.withIndexAnalyzer(index, analyzer, toAnalyze); + + final AnalyzeResponse result = client.execute((c, requestOptions) -> c.indices().analyze(analyzeRequest, requestOptions)); + return result.getTokens().stream() + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()); + } + + @Override + public IndexingResults bulkIndex(List messageList) throws IOException { + return chunkedBulkIndexer.index(messageList, this::runBulkRequest); + } + + private ChunkedBulkIndexer.BulkIndexResult runBulkRequest(int indexedSuccessfully, IndexingResults previousResults, List chunk) throws ChunkedBulkIndexer.EntityTooLargeException { + final BulkRequest bulkRequest = createBulkRequest(chunk); + + final BulkResponse result; + try { + result = this.client.execute((c, requestOptions) -> c.bulk(bulkRequest, requestOptions)); + } catch (ElasticsearchException e) { + for (ElasticsearchException cause : e.guessRootCauses()) { + if (cause.status().equals(RestStatus.REQUEST_ENTITY_TOO_LARGE)) { + throw new ChunkedBulkIndexer.EntityTooLargeException(indexedSuccessfully, previousResults); + } + if (cause.status().equals(RestStatus.TOO_MANY_REQUESTS)) { + if (cause.getDetailedMessage().contains(CIRCUIT_BREAKING_EXCEPTION)) { + throw new ChunkedBulkIndexer.CircuitBreakerException(indexedSuccessfully, previousResults, durabilityFrom(cause)); + } + throw new ChunkedBulkIndexer.TooManyRequestsException(indexedSuccessfully, previousResults); + } + } + throw new org.graylog2.indexer.ElasticsearchException(e); + } + return new ChunkedBulkIndexer.BulkIndexResult(indexingResultsFrom(result, chunk), result::buildFailureMessage, result.getItems().length); + } + + private ChunkedBulkIndexer.CircuitBreakerException.Durability durabilityFrom(ElasticsearchException elasticsearchException) { + return Optional.ofNullable(elasticsearchException.getMetadata("es.durability")) + .map(durabilities -> durabilities.get(0)) + .map(durability -> switch (durability) { + case "TRANSIENT" -> ChunkedBulkIndexer.CircuitBreakerException.Durability.Transient; + case "PERMANENT" -> ChunkedBulkIndexer.CircuitBreakerException.Durability.Permanent; + default -> throw new IllegalStateException("Invalid durability: " + durability); + }) + .orElse(ChunkedBulkIndexer.CircuitBreakerException.Durability.Permanent); + } + + private BulkRequest createBulkRequest(List chunk) { + final BulkRequest bulkRequest = new BulkRequest(); + chunk.forEach(request -> bulkRequest.add( + indexRequestFrom(request) + )); + return bulkRequest; + } + + private IndexingResults indexingResultsFrom(BulkResponse response, List request) { + final Map> partitionedResults = 
Arrays.stream(response.getItems()).collect(Collectors.partitioningBy(BulkItemResponse::isFailed)); + final List failures = partitionedResults.get(true); + final List successes = partitionedResults.get(false); + + final Map messageMap = request.stream() + .map(IndexingRequest::message) + .distinct() + .collect(Collectors.toMap(Indexable::getId, Function.identity(), (a, b) -> a)); + + return IndexingResults.create(indexingSuccessFrom(successes, messageMap), indexingErrorsFrom(failures, messageMap)); + } + + private List indexingErrorsFrom(List failedItems, Map messageMap) { + return indexingResultsFrom(failedItems, messageMap) + .stream().filter(IndexingError.class::isInstance).map(IndexingError.class::cast).toList(); + } + + private List indexingSuccessFrom(List failedItems, Map messageMap) { + return indexingResultsFrom(failedItems, messageMap) + .stream().filter(IndexingSuccess.class::isInstance).map(IndexingSuccess.class::cast).toList(); + } + + private List indexingResultsFrom(List responses, Map messageMap) { + return responses.stream() + .map(item -> { + final Indexable message = messageMap.get(item.getId()); + return indexingResultFromResponse(item, message); + }) + .collect(Collectors.toList()); + } + + private IndexingResult indexingResultFromResponse(BulkItemResponse response, Indexable message) { + if (response.isFailed()) { + return IndexingError.create(message, response.getIndex(), errorTypeFromResponse(response), response.getFailureMessage()); + } + return IndexingSuccess.create(message, response.getIndex()); + } + + private IndexingError.Type errorTypeFromResponse(BulkItemResponse item) { + final ParsedElasticsearchException exception = ParsedElasticsearchException.from(item.getFailureMessage()); + switch (exception.type()) { + case MAPPER_PARSING_EXCEPTION: + return IndexingError.Type.MappingError; + case INDEX_BLOCK_ERROR: + if (exception.reason().contains(INDEX_BLOCK_REASON) || exception.reason().contains(FLOOD_STAGE_WATERMARK)) + return IndexingError.Type.IndexBlocked; + case UNAVAILABLE_SHARDS_EXCEPTION: + if (exception.reason().contains(PRIMARY_SHARD_NOT_ACTIVE_REASON)) + return IndexingError.Type.IndexBlocked; + case ILLEGAL_ARGUMENT_EXCEPTION: + if (exception.reason().contains(NO_WRITE_INDEX_DEFINED_FOR_ALIAS)) + return IndexingError.Type.IndexBlocked; + case CIRCUIT_BREAKING_EXCEPTION: + if (exception.reason().contains(DATA_TOO_LARGE)) + return IndexingError.Type.DataTooLarge; + default: + return IndexingError.Type.Unknown; + } + } + + private IndexRequest indexRequestFrom(IndexingRequest request) { + final byte[] body; + try { + body = request.message().serialize(SerializationContext.of(objectMapper, this.invalidTimestampMeter)); + } catch (IOException e) { + throw new RuntimeException(e); + } + return new IndexRequest(request.indexSet().getWriteIndexAlias()) + .id(request.message().getId()) + .source(body, XContentType.JSON); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/MoreSearchAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/MoreSearchAdapterES7.java new file mode 100644 index 000000000000..32d8e39ccee0 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/MoreSearchAdapterES7.java @@ -0,0 +1,303 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.base.Stopwatch; +import com.google.common.collect.Streams; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.graylog.events.event.EventDto; +import org.graylog.events.processor.EventProcessorException; +import org.graylog.events.search.MoreSearch; +import org.graylog.events.search.MoreSearchAdapter; +import org.graylog.plugins.views.search.searchfilters.model.UsedSearchFilter; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.ToXContent; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.ParsedAutoDateHistogram; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.FieldSortBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortOrder; +import org.graylog2.indexer.results.ChunkedResult; +import org.graylog2.indexer.results.MultiChunkResultRetriever; +import org.graylog2.indexer.results.ResultChunk; +import org.graylog2.indexer.results.ResultMessage; +import org.graylog2.indexer.searches.ChunkCommand; +import org.graylog2.indexer.searches.Sorting; +import org.graylog2.plugin.Message; +import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +import static com.google.common.base.Strings.isNullOrEmpty; +import static 
java.util.Objects.requireNonNull; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.termsQuery; + +public class MoreSearchAdapterES7 implements MoreSearchAdapter { + private static final Logger LOG = LoggerFactory.getLogger(MoreSearchAdapterES7.class); + public static final IndicesOptions INDICES_OPTIONS = IndicesOptions.LENIENT_EXPAND_OPEN; + private static final String termsAggregationName = "alert_type"; + private static final String histogramAggregationName = "histogram"; + private final ES7ResultMessageFactory resultMessageFactory; + private final ElasticsearchClient client; + private final Boolean allowLeadingWildcard; + private final SortOrderMapper sortOrderMapper; + private final MultiChunkResultRetriever multiChunkResultRetriever; + + @Inject + public MoreSearchAdapterES7(ES7ResultMessageFactory resultMessageFactory, + ElasticsearchClient client, + @Named("allow_leading_wildcard_searches") Boolean allowLeadingWildcard, + SortOrderMapper sortOrderMapper, + MultiChunkResultRetriever multiChunkResultRetriever) { + this.resultMessageFactory = resultMessageFactory; + this.client = client; + this.allowLeadingWildcard = allowLeadingWildcard; + this.sortOrderMapper = sortOrderMapper; + this.multiChunkResultRetriever = multiChunkResultRetriever; + } + + @Override + public MoreSearch.Result eventSearch(String queryString, TimeRange timerange, Set affectedIndices, + Sorting sorting, int page, int perPage, Set eventStreams, + String filterString, Set forbiddenSourceStreams, Map> extraFilters) { + final var filter = createQuery(queryString, timerange, eventStreams, filterString, forbiddenSourceStreams, extraFilters); + + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(filter) + .from((page - 1) * perPage) + .size(perPage) + .trackTotalHits(true); + final var sortBuilders = createSorting(sorting); + sortBuilders.forEach(searchSourceBuilder::sort); + + final Set indices = affectedIndices.isEmpty() ? 
Collections.singleton("") : affectedIndices; + final SearchRequest searchRequest = new SearchRequest(indices.toArray(new String[0])) + .source(searchSourceBuilder) + .indicesOptions(INDICES_OPTIONS); + + if (LOG.isDebugEnabled()) { + LOG.debug("Query:\n{}", searchSourceBuilder.toString(new ToXContent.MapParams(Collections.singletonMap("pretty", "true")))); + LOG.debug("Execute search: {}", searchRequest); + } + + final SearchResponse searchResult = client.search(searchRequest, "Unable to perform search query"); + + final List hits = Streams.stream(searchResult.getHits()) + .map(resultMessageFactory::fromSearchHit) + .collect(Collectors.toList()); + + final long total = searchResult.getHits().getTotalHits().value; + + return MoreSearch.Result.builder() + .results(hits) + .resultsCount(total) + .duration(searchResult.getTook().getMillis()) + .usedIndexNames(affectedIndices) + .executedQuery(searchSourceBuilder.toString()) + .build(); + } + + @Override + public MoreSearch.Histogram eventHistogram(int buckets, String queryString, AbsoluteRange timerange, Set affectedIndices, + Set eventStreams, String filterString, Set forbiddenSourceStreams, ZoneId timeZone, + Map> extraFilters) { + final var filter = createQuery(queryString, timerange, eventStreams, filterString, forbiddenSourceStreams, extraFilters); + + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(filter) + .size(0) + .trackTotalHits(true); + + final var histogramAggregation = new AutoDateHistogramAggregationBuilder(histogramAggregationName) + .field(EventDto.FIELD_EVENT_TIMESTAMP) + .timeZone(timeZone) + .setNumBuckets(buckets); + + final var termsAggregation = AggregationBuilders.terms(termsAggregationName) + .field(EventDto.FIELD_ALERT); + + searchSourceBuilder.aggregation(histogramAggregation.subAggregation(termsAggregation)); + + final Set indices = affectedIndices.isEmpty() ? 
Collections.singleton("") : affectedIndices; + final SearchRequest searchRequest = new SearchRequest(indices.toArray(new String[0])) + .source(searchSourceBuilder) + .indicesOptions(INDICES_OPTIONS); + + if (LOG.isDebugEnabled()) { + LOG.debug("Query:\n{}", searchSourceBuilder.toString(new ToXContent.MapParams(Collections.singletonMap("pretty", "true")))); + LOG.debug("Execute search: {}", searchRequest); + } + + final SearchResponse searchResult = client.search(searchRequest, "Unable to perform search query"); + + final ParsedAutoDateHistogram histogramResult = searchResult.getAggregations().get(histogramAggregationName); + final var histogramBuckets = histogramResult.getBuckets(); + + final var alerts = new ArrayList(histogramBuckets.size()); + final var events = new ArrayList(histogramBuckets.size()); + + histogramBuckets.forEach(bucket -> { + final var parsedTerms = (ParsedTerms) bucket.getAggregations().get(termsAggregationName); + final var dateTime = (ZonedDateTime) bucket.getKey(); + final var alertCount = Optional.ofNullable(parsedTerms.getBucketByKey("true")).map(MultiBucketsAggregation.Bucket::getDocCount).orElse(0L); + final var eventCount = Optional.ofNullable(parsedTerms.getBucketByKey("false")).map(MultiBucketsAggregation.Bucket::getDocCount).orElse(0L); + alerts.add(new MoreSearch.Histogram.Bucket(dateTime, alertCount)); + events.add(new MoreSearch.Histogram.Bucket(dateTime, eventCount)); + }); + + return new MoreSearch.Histogram(new MoreSearch.Histogram.EventsBuckets(events, alerts)); + } + + private QueryBuilder createQuery(String queryString, TimeRange timerange, Set eventStreams, String filterString, + Set forbiddenSourceStreams, Map> extraFilters) { + final QueryBuilder query = (queryString.isEmpty() || queryString.equals("*")) + ? matchAllQuery() + : queryStringQuery(queryString).allowLeadingWildcard(allowLeadingWildcard); + + final BoolQueryBuilder filter = boolQuery() + .filter(query) + .filter(termsQuery(EventDto.FIELD_STREAMS, eventStreams)) + .filter(requireNonNull(TimeRangeQueryFactory.create(timerange))); + + extraFilters.entrySet() + .stream() + .flatMap(extraFilter -> extraFilter.getValue() + .stream() + .map(value -> buildExtraFilter(extraFilter.getKey(), value))) + .forEach(filter::filter); + + if (!isNullOrEmpty(filterString)) { + filter.filter(queryStringQuery(filterString)); + } + + if (!forbiddenSourceStreams.isEmpty()) { + // If an event has any stream in "source_streams" that the calling search user is not allowed to access, + // the event must not be in the search result. + filter.filter(boolQuery().mustNot(termsQuery(EventDto.FIELD_SOURCE_STREAMS, forbiddenSourceStreams))); + } + + return filter; + } + + private QueryBuilder buildExtraFilter(String field, String value) { + return QueryBuilders.multiMatchQuery(value, field); + } + + private List createSorting(Sorting sorting) { + final SortOrder order = sortOrderMapper.fromSorting(sorting); + final List sortBuilders; + if (EventDto.FIELD_TIMERANGE_START.equals(sorting.getField())) { + sortBuilders = List.of( + new FieldSortBuilder(EventDto.FIELD_TIMERANGE_START), + new FieldSortBuilder(EventDto.FIELD_TIMERANGE_END) + ); + } else { + sortBuilders = List.of(new FieldSortBuilder(sorting.getField())); + } + return sortBuilders.stream() + .map(sortBuilder -> { + sorting.getUnmappedType().ifPresent(unmappedType -> sortBuilder + .unmappedType(unmappedType) + .missing(order.equals(SortOrder.ASC) ? 
"_first" : "_last")); + return sortBuilder.order(order); + }) + .toList(); + } + + @Override + public void scrollEvents(String queryString, TimeRange timeRange, Set affectedIndices, Set streams, + List filters, int batchSize, ScrollEventsCallback resultCallback) throws EventProcessorException { + final ChunkCommand chunkCommand = buildScrollCommand(queryString, timeRange, affectedIndices, filters, streams, batchSize); + + final ChunkedResult chunkedResult = multiChunkResultRetriever.retrieveChunkedResult(chunkCommand); + + final AtomicBoolean continueScrolling = new AtomicBoolean(true); + + final Stopwatch stopwatch = Stopwatch.createStarted(); + try { + ResultChunk resultChunk = chunkedResult.nextChunk(); + while (continueScrolling.get() && resultChunk != null) { + final List messages = resultChunk.messages(); + + LOG.debug("Passing <{}> messages to callback", messages.size()); + resultCallback.accept(Collections.unmodifiableList(messages), continueScrolling); + + // Stop if the resultCallback told us to stop + if (!continueScrolling.get()) { + break; + } + + resultChunk = chunkedResult.nextChunk(); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } finally { + try { + // Tell Elasticsearch that we are done with the scroll so it can release resources as soon as possible + // instead of waiting for the scroll timeout to kick in. + chunkedResult.cancel(); + } catch (Exception ignored) { + } + LOG.debug("Scrolling done - took {} ms", stopwatch.stop().elapsed(TimeUnit.MILLISECONDS)); + } + } + + private ChunkCommand buildScrollCommand(String queryString, TimeRange timeRange, Set affectedIndices, List filters, Set streams, int batchSize) { + ChunkCommand.Builder commandBuilder = ChunkCommand.builder() + .query(queryString) + .range(timeRange) + .indices(affectedIndices) + .filters(filters == null ? Collections.emptyList() : filters) + .batchSize(batchSize) + // For correlation need the oldest messages to come in first + .sorting(new Sorting(Message.FIELD_TIMESTAMP, Sorting.Direction.ASC)); + + if (!streams.isEmpty()) { + commandBuilder = commandBuilder.streams(streams); + } + + return commandBuilder + .build(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodeAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodeAdapterES7.java new file mode 100644 index 000000000000..89f538cebbfc --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodeAdapterES7.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog2.indexer.cluster.NodeAdapter; +import org.graylog2.shared.utilities.StringUtils; +import org.graylog2.storage.SearchVersion; + +import jakarta.inject.Inject; + +import java.util.Optional; + +public class NodeAdapterES7 implements NodeAdapter { + private final PlainJsonApi jsonApi; + + @Inject + public NodeAdapterES7(ElasticsearchClient client, ObjectMapper objectMapper) { + this.jsonApi = new PlainJsonApi(objectMapper, client); + } + + NodeAdapterES7(final PlainJsonApi jsonApi) { + this.jsonApi = jsonApi; + } + + @Override + public Optional version() { + + final Request request = new Request("GET", "/?filter_path=version.number,version.distribution"); + final Optional resp = Optional.of(jsonApi.perform(request, "Unable to retrieve cluster information")); + + final Optional version = resp.map(r -> r.path("version")).map(r -> r.path("number")).map(JsonNode::textValue); + + final SearchVersion.Distribution distribution = resp.map(r -> r.path("version")).map(r -> r.path("distribution")).map(JsonNode::textValue) + .map(StringUtils::toUpperCase) + .map(SearchVersion.Distribution::valueOf) + .orElse(SearchVersion.Distribution.ELASTICSEARCH); + + return version + .map(this::parseVersion) + .map(v -> SearchVersion.create(distribution, v)); + } + + +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodeListSniffer.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodeListSniffer.java new file mode 100644 index 000000000000..832f116de7ae --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodeListSniffer.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.collect.Sets; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Node; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +public class NodeListSniffer implements NodesSniffer { + private static final Logger LOG = LoggerFactory.getLogger(NodeListSniffer.class); + private static final Set savedNodes = ConcurrentHashMap.newKeySet(); + + static NodeListSniffer create() { + return new NodeListSniffer(); + } + + @Override + public List sniff(final List nodes) { + final Set currentNodes = nodes.stream().map(n -> n.getHost().toURI()).collect(Collectors.toSet()); + + final Set nodesAdded = Sets.difference(currentNodes, savedNodes); + final Set nodesDropped = Sets.difference(savedNodes, currentNodes); + + if(!nodesAdded.isEmpty()) { + LOG.info("Added node(s): {}", nodesAdded); + } + if(!nodesDropped.isEmpty()) { + LOG.info("Dropped node(s): {}", nodesDropped); + } + if(!nodesAdded.isEmpty() || !nodesDropped.isEmpty()) { + LOG.info("Current node list: {}", currentNodes); + } + + savedNodes.clear(); + savedNodes.addAll(currentNodes); + + return nodes; + } +} + diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodesSniffer.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodesSniffer.java new file mode 100644 index 000000000000..656e9687dc4b --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/NodesSniffer.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Node; + +import java.io.IOException; +import java.util.List; + +public interface NodesSniffer { + List sniff(List nodes) throws IOException; +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PaginationES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PaginationES7.java new file mode 100644 index 000000000000..140fa42b1abc --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PaginationES7.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. 
If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog2.indexer.results.ChunkedResult; +import org.graylog2.indexer.results.MultiChunkResultRetriever; +import org.graylog2.indexer.results.ResultMessageFactory; +import org.graylog2.indexer.searches.ChunkCommand; + +import java.util.Set; + +public class PaginationES7 implements MultiChunkResultRetriever { + private final ResultMessageFactory resultMessageFactory; + private final ElasticsearchClient client; + private final SearchRequestFactory searchRequestFactory; + + @Inject + public PaginationES7(final ResultMessageFactory resultMessageFactory, + final ElasticsearchClient client, + final SearchRequestFactory searchRequestFactory) { + this.resultMessageFactory = resultMessageFactory; + this.client = client; + this.searchRequestFactory = searchRequestFactory; + } + + @Override + public ChunkedResult retrieveChunkedResult(final ChunkCommand chunkCommand) { + final SearchSourceBuilder searchQuery = searchRequestFactory.create(chunkCommand); + final SearchRequest request = buildSearchRequest(searchQuery, chunkCommand.indices()); + final SearchResponse result = client.search(request, "Unable to perform search-after pagination search"); + return new PaginationResultES7(resultMessageFactory, client, request, result, searchQuery.toString(), chunkCommand.fields(), chunkCommand.limit().orElse(-1)); + } + + private SearchRequest buildSearchRequest(final SearchSourceBuilder query, + final Set indices) { + return new SearchRequest(indices.toArray(new String[0])) + .source(query) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PaginationResultES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PaginationResultES7.java new file mode 100644 index 000000000000..efac410aee2d --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PaginationResultES7.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.inject.assistedinject.Assisted; +import com.google.inject.assistedinject.AssistedInject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog2.indexer.results.ResultMessageFactory; + +import javax.annotation.Nullable; +import java.io.IOException; +import java.util.List; + +public class PaginationResultES7 extends ChunkedQueryResultES7 { + private SearchRequest initialSearchRequest; + + @AssistedInject + public PaginationResultES7(ResultMessageFactory resultMessageFactory, + ElasticsearchClient client, + SearchRequest initialSearchRequest, + @Assisted SearchResponse initialResult, + @Assisted("query") String query, + @Assisted List fields, + @Assisted int limit) { + super(resultMessageFactory, client, initialResult, query, fields, limit); + this.initialSearchRequest = initialSearchRequest; + } + + @Override + @Nullable + protected SearchResponse nextSearchResult() throws IOException { + final SearchSourceBuilder initialQuery = initialSearchRequest.source(); + final SearchHit[] hits = lastSearchResponse.getHits().getHits(); + if (hits == null || hits.length == 0) { + return null; + } + initialQuery.searchAfter(hits[hits.length - 1].getSortValues()); + initialSearchRequest.source(initialQuery); + return client.executeWithIOException((c, requestOptions) -> c.search(initialSearchRequest, requestOptions), + "Unable to retrieve next chunk from search: "); + } + + @Override + protected String getChunkingMethodName() { + return "search-after pagination"; + } + + @Override + public void cancel() { + //not needed for pagination + } + +} + diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ParsedElasticsearchException.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ParsedElasticsearchException.java new file mode 100644 index 000000000000..b2982e132ccf --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ParsedElasticsearchException.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
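Aside: PaginationES7 and PaginationResultES7 implement search_after paging: the same query is re-issued with the sort values of the previous page's last hit as the cursor. A rough standalone loop against the plain (unshaded) high-level REST client; index name and page size are invented, and the real code goes through Graylog's ElasticsearchClient wrapper instead:

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;

import java.io.IOException;

class SearchAfterSketch {
    static void fetchAll(RestHighLevelClient client) throws IOException {
        final SearchSourceBuilder source = new SearchSourceBuilder()
                .query(QueryBuilders.matchAllQuery())
                .sort("_doc", SortOrder.ASC) // a stable sort is required for search_after
                .size(500);
        final SearchRequest request = new SearchRequest("graylog_*").source(source);

        while (true) {
            final SearchResponse response = client.search(request, RequestOptions.DEFAULT);
            final SearchHit[] hits = response.getHits().getHits();
            if (hits.length == 0) {
                break; // no more chunks
            }
            // ... process hits ...
            // Continue after the last hit of this page.
            source.searchAfter(hits[hits.length - 1].getSortValues());
        }
    }
}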
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.auto.value.AutoValue; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@AutoValue +public abstract class ParsedElasticsearchException { + private static final Pattern exceptionPattern = Pattern + .compile("(ElasticsearchException\\[)?Elasticsearch exception \\[type=(?[\\w_]+), (?:reason=(?.+?)(\\]+;|\\]$))"); + + public abstract String type(); + public abstract String reason(); + + public static ParsedElasticsearchException create(String type, String reason) { + return new AutoValue_ParsedElasticsearchException(type, reason); + } + + public static ParsedElasticsearchException from(String s) { + final Matcher matcher = exceptionPattern.matcher(s); + if (matcher.find()) { + final String type = matcher.group("type"); + final String reason = matcher.group("reason"); + + return create(type, reason); + } + + throw new IllegalArgumentException("Unable to parse Elasticsearch exception: " + s); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PlainJsonApi.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PlainJsonApi.java new file mode 100644 index 000000000000..b9bb4b72716d --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/PlainJsonApi.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Response; + +import jakarta.inject.Inject; + +public class PlainJsonApi { + private final ObjectMapper objectMapper; + private final ElasticsearchClient client; + + @Inject + public PlainJsonApi(ObjectMapper objectMapper, + ElasticsearchClient client) { + this.objectMapper = objectMapper; + this.client = client; + } + + public JsonNode perform(Request request, String errorMessage) { + return client.execute((c, requestOptions) -> { + request.setOptions(requestOptions); + final Response response = c.getLowLevelClient().performRequest(request); + return objectMapper.readTree(response.getEntity().getContent()); + }, errorMessage); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ProxyRequestAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ProxyRequestAdapterES7.java new file mode 100644 index 000000000000..59f7f9a1e06a --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ProxyRequestAdapterES7.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
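Aside: the matcher.group("type") and matcher.group("reason") calls in ParsedElasticsearchException rely on named capture groups. A self-contained sketch with the presumed (?<type>...) and (?<reason>...) groups spelled out; the sample message is invented and the pattern is a reconstruction, not the verbatim source:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class ExceptionParsingSketch {
    // Presumed shape of the pattern, with the named groups restored.
    private static final Pattern EXCEPTION_PATTERN = Pattern.compile(
            "(ElasticsearchException\\[)?Elasticsearch exception \\[type=(?<type>[\\w_]+), (?:reason=(?<reason>.+?)(\\]+;|\\]$))");

    public static void main(String[] args) {
        final String message = "Elasticsearch exception [type=index_not_found_exception, reason=no such index [graylog_0]]";
        final Matcher matcher = EXCEPTION_PATTERN.matcher(message);
        if (matcher.find()) {
            System.out.println(matcher.group("type"));   // index_not_found_exception
            System.out.println(matcher.group("reason")); // no such index [graylog_0]
        }
    }
}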
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import org.graylog2.indexer.datanode.ProxyRequestAdapter; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +public class ProxyRequestAdapterES7 implements ProxyRequestAdapter { + private static final String ERROR_MESSAGE = "This functionality is only available when the Data Node is used."; + + @Override + public ProxyResponse request(ProxyRequest request) throws IOException { + return new ProxyResponse(400, new ByteArrayInputStream(ERROR_MESSAGE.getBytes(StandardCharsets.UTF_8)), "text/plain"); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/QuerySuggestionsES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/QuerySuggestionsES7.java new file mode 100644 index 000000000000..3330f1a85b1a --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/QuerySuggestionsES7.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.collect.ImmutableMap; +import org.graylog.plugins.views.search.elasticsearch.IndexLookup; +import org.graylog.plugins.views.search.engine.QuerySuggestionsService; +import org.graylog.plugins.views.search.engine.suggestions.SuggestionEntry; +import org.graylog.plugins.views.search.engine.suggestions.SuggestionError; +import org.graylog.plugins.views.search.engine.suggestions.SuggestionRequest; +import org.graylog.plugins.views.search.engine.suggestions.SuggestionResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.ScriptQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.script.Script; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.script.ScriptType; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.suggest.SuggestBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.suggest.SuggestBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.suggest.term.TermSuggestion; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.suggest.term.TermSuggestionBuilder; +import org.graylog.storage.errors.ResponseError; +import org.graylog2.plugin.Message; + +import jakarta.inject.Inject; + +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +public class QuerySuggestionsES7 implements QuerySuggestionsService { + + private final ElasticsearchClient client; + private final IndexLookup indexLookup; + + @Inject + public QuerySuggestionsES7(ElasticsearchClient client, IndexLookup indexLookup) { + this.client = client; + this.indexLookup = indexLookup; + } + + @Override + public SuggestionResponse suggest(SuggestionRequest req) { + final Set affectedIndices = indexLookup.indexNamesForStreamsInTimeRange(req.streams(), req.timerange()); + final TermSuggestionBuilder suggestionBuilder = SuggestBuilders.termSuggestion(req.field()).text(req.input()).size(req.size()); + final BoolQueryBuilder query = QueryBuilders.boolQuery() + .filter(QueryBuilders.termsQuery(Message.FIELD_STREAMS, req.streams())) + .filter(TimeRangeQueryFactory.create(req.timerange())) + .filter(QueryBuilders.existsQuery(req.field())) + .filter(getPrefixQuery(req)); + final SearchSourceBuilder search = new SearchSourceBuilder() + .query(query) + .size(0) + .aggregation(AggregationBuilders.terms("fieldvalues").field(req.field()).size(req.size())) + .suggest(new SuggestBuilder().addSuggestion("corrections", suggestionBuilder)); + + try { + final SearchResponse result = client.singleSearch(new SearchRequest(affectedIndices.toArray(new String[]{})).source(search), "Failed to execute aggregation"); + final ParsedTerms fieldValues = 
result.getAggregations().get("fieldvalues"); + final List entries = fieldValues.getBuckets().stream().map(b -> new SuggestionEntry(b.getKeyAsString(), b.getDocCount())).collect(Collectors.toList()); + + if (!entries.isEmpty()) { + return SuggestionResponse.forSuggestions(req.field(), req.input(), entries, fieldValues.getSumOfOtherDocCounts()); + } else { + TermSuggestion suggestion = result.getSuggest().getSuggestion("corrections"); + final List corrections = suggestion.getEntries().stream().flatMap(e -> e.getOptions().stream()).map(o -> new SuggestionEntry(o.getText().string(), o.getFreq())).collect(Collectors.toList()); + return SuggestionResponse.forSuggestions(req.field(), req.input(), corrections, null); + } + + + } catch (org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException exception) { + final SuggestionError err = tryResponseException(exception) + .orElseGet(() -> parseException(exception)); + return SuggestionResponse.forError(req.field(), req.input(), err); + } + } + + + private QueryBuilder getPrefixQuery(SuggestionRequest req) { + return switch (req.fieldType()) { + case TEXTUAL -> QueryBuilders.prefixQuery(req.field(), req.input()); + default -> getScriptedPrefixQuery(req); + }; + } + + /** + * Unlike prefix query, this scripted implementation works also for numerical fields. + * TODO: would it make sense to switch between this scripted implementation and the standard prefix + * query based on our information about the field type? Would it be faster? + */ + private static ScriptQueryBuilder getScriptedPrefixQuery(SuggestionRequest req) { + final Script script = new Script(ScriptType.INLINE, "painless", + "String val = doc[params.field].value.toString(); return val.startsWith(params.input);", + ImmutableMap.of("field", req.field(), "input", req.input())); + return QueryBuilders.scriptQuery(script); + } + + private Optional tryResponseException(org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException exception) { + return client.parseResponseException(exception) + .map(ResponseError::error) + .flatMap(e -> e.rootCause().stream().findFirst()) + .map(e -> SuggestionError.create(e.type(), e.reason())); + } + + private SuggestionError parseException(org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException exception) { + final Throwable cause = getCause(exception); + try { + final ParsedElasticsearchException parsed = ParsedElasticsearchException.from(cause.toString()); + return SuggestionError.create(parsed.type(), parsed.reason()); + } catch (final IllegalArgumentException iae) { + return SuggestionError.create("Aggregation error", cause.getMessage()); + } + } + + private Throwable getCause(Throwable exception) { + if (exception.getCause() != null) { + return getCause(exception.getCause()); + } else { + return exception; + } + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/RestHighLevelClientProvider.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/RestHighLevelClientProvider.java new file mode 100644 index 000000000000..128f3cf9f90c --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/RestHighLevelClientProvider.java @@ -0,0 +1,147 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
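Aside: QuerySuggestionsES7 combines a terms aggregation over the prefix-filtered field values with a term suggester that is only consulted when no values match. A condensed sketch of that request/response shape with the unshaded client; index name and sizes are invented:

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.suggest.term.TermSuggestion;

import java.io.IOException;

class SuggestionSketch {
    static void suggest(RestHighLevelClient client, String field, String input) throws IOException {
        final SearchSourceBuilder source = new SearchSourceBuilder()
                .query(QueryBuilders.boolQuery()
                        .filter(QueryBuilders.existsQuery(field))
                        .filter(QueryBuilders.prefixQuery(field, input)))
                .size(0)
                .aggregation(AggregationBuilders.terms("fieldvalues").field(field).size(10))
                .suggest(new SuggestBuilder()
                        .addSuggestion("corrections", SuggestBuilders.termSuggestion(field).text(input).size(10)));

        final SearchResponse response = client.search(new SearchRequest("graylog_*").source(source), RequestOptions.DEFAULT);

        final Terms fieldValues = response.getAggregations().get("fieldvalues");
        if (!fieldValues.getBuckets().isEmpty()) {
            fieldValues.getBuckets().forEach(b -> System.out.println(b.getKeyAsString() + " (" + b.getDocCount() + ")"));
        } else {
            // No prefix matches: fall back to "did you mean" style corrections.
            final TermSuggestion corrections = response.getSuggest().getSuggestion("corrections");
            corrections.getEntries().forEach(e -> e.getOptions()
                    .forEach(o -> System.out.println(o.getText().string() + " (" + o.getFreq() + ")")));
        }
    }
}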
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.github.joschi.jadconfig.util.Duration; +import com.google.common.base.Suppliers; +import org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost; +import org.graylog.shaded.elasticsearch7.org.apache.http.client.CredentialsProvider; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestClient; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestClientBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestHighLevelClient; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.ElasticsearchNodesSniffer; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.NodesSniffer; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.Sniffer; +import org.graylog2.configuration.IndexerHosts; +import org.graylog2.system.shutdown.GracefulShutdownService; + +import javax.annotation.Nullable; + +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.inject.Provider; +import jakarta.inject.Singleton; + +import java.net.URI; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +@Singleton +public class RestHighLevelClientProvider implements Provider { + private final Supplier clientSupplier; + + @SuppressWarnings("unused") + @Inject + public RestHighLevelClientProvider( + GracefulShutdownService shutdownService, + @IndexerHosts List hosts, + @Named("elasticsearch_connect_timeout") Duration connectTimeout, + @Named("elasticsearch_socket_timeout") Duration socketTimeout, + @Named("elasticsearch_idle_timeout") Duration elasticsearchIdleTimeout, + @Named("elasticsearch_max_total_connections") int maxTotalConnections, + @Named("elasticsearch_max_total_connections_per_route") int maxTotalConnectionsPerRoute, + @Named("elasticsearch_max_retries") int elasticsearchMaxRetries, + @Named("elasticsearch_discovery_enabled") boolean discoveryEnabled, + @Named("elasticsearch_node_activity_logger_enabled") boolean nodeActivity, + @Named("elasticsearch_discovery_filter") @Nullable String discoveryFilter, + @Named("elasticsearch_discovery_frequency") Duration discoveryFrequency, + @Named("elasticsearch_discovery_default_scheme") String defaultSchemeForDiscoveredNodes, + @Named("elasticsearch_use_expect_continue") boolean useExpectContinue, + @Named("elasticsearch_mute_deprecation_warnings") boolean muteElasticsearchDeprecationWarnings, + CredentialsProvider credentialsProvider) { + clientSupplier = Suppliers.memoize(() -> { + final RestHighLevelClient client = buildClient(hosts, + connectTimeout, + socketTimeout, + maxTotalConnections, + maxTotalConnectionsPerRoute, + useExpectContinue, + muteElasticsearchDeprecationWarnings, + credentialsProvider); + + var sniffer = SnifferWrapper.create( + client.getLowLevelClient(), + TimeUnit.SECONDS.toMillis(5), + discoveryFrequency, + mapDefaultScheme(defaultSchemeForDiscoveredNodes) + ); + + if (discoveryEnabled) { + sniffer.add(FilteredElasticsearchNodesSniffer.create(discoveryFilter)); + } + if (nodeActivity) { + 
sniffer.add(NodeListSniffer.create()); + } + + sniffer.build().ifPresent(s -> shutdownService.register(s::close)); + + return client; + }); + } + + private ElasticsearchNodesSniffer.Scheme mapDefaultScheme(String defaultSchemeForDiscoveredNodes) { + switch (defaultSchemeForDiscoveredNodes.toUpperCase(Locale.ENGLISH)) { + case "HTTP": + return ElasticsearchNodesSniffer.Scheme.HTTP; + case "HTTPS": + return ElasticsearchNodesSniffer.Scheme.HTTPS; + default: + throw new IllegalArgumentException("Invalid default scheme for discovered ES nodes: " + defaultSchemeForDiscoveredNodes); + } + } + + @Override + public RestHighLevelClient get() { + return this.clientSupplier.get(); + } + + private RestHighLevelClient buildClient( + List hosts, + Duration connectTimeout, + Duration socketTimeout, + int maxTotalConnections, + int maxTotalConnectionsPerRoute, + boolean useExpectContinue, + boolean muteElasticsearchDeprecationWarnings, + CredentialsProvider credentialsProvider) { + final HttpHost[] esHosts = hosts.stream().map(uri -> new HttpHost(uri.getHost(), uri.getPort(), uri.getScheme())).toArray(HttpHost[]::new); + + final RestClientBuilder restClientBuilder = RestClient.builder(esHosts) + .setRequestConfigCallback(requestConfig -> requestConfig + .setConnectTimeout(Math.toIntExact(connectTimeout.toMilliseconds())) + .setSocketTimeout(Math.toIntExact(socketTimeout.toMilliseconds())) + .setExpectContinueEnabled(useExpectContinue) + .setAuthenticationEnabled(true) + ) + .setHttpClientConfigCallback(httpClientConfig -> { + httpClientConfig + .setMaxConnTotal(maxTotalConnections) + .setMaxConnPerRoute(maxTotalConnectionsPerRoute) + .setDefaultCredentialsProvider(credentialsProvider); + + if (muteElasticsearchDeprecationWarnings) { + httpClientConfig.addInterceptorFirst(new ElasticsearchFilterDeprecationWarningsInterceptor()); + } + + return httpClientConfig; + }); + + return new RestHighLevelClient(restClientBuilder); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Scroll.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Scroll.java new file mode 100644 index 000000000000..8abfbb4a2970 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/Scroll.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
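Aside: stripped of the configuration plumbing, the sniffer setup in RestHighLevelClientProvider/SnifferWrapper rests on the stock Elasticsearch REST client APIs, roughly as below (unshaded imports; host, timeouts and interval are invented values):

import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer;
import org.elasticsearch.client.sniff.Sniffer;

class SnifferWiringSketch {
    static RestHighLevelClient build() {
        final RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")));

        // Periodically re-discover cluster nodes via the low-level client.
        final Sniffer sniffer = Sniffer.builder(client.getLowLevelClient())
                .setSniffIntervalMillis(30_000)
                .setNodesSniffer(new ElasticsearchNodesSniffer(
                        client.getLowLevelClient(), 5_000, ElasticsearchNodesSniffer.Scheme.HTTP))
                .build();

        // sniffer.close() needs to run on shutdown; in the provider above this is
        // registered with the GracefulShutdownService.
        return client;
    }
}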
+ */ +package org.graylog.storage.elasticsearch7; + +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog2.indexer.results.ChunkedResult; +import org.graylog2.indexer.results.MultiChunkResultRetriever; +import org.graylog2.indexer.searches.ChunkCommand; + +import java.util.Set; + +@Deprecated +public class Scroll implements MultiChunkResultRetriever { + private static final String DEFAULT_SCROLLTIME = "1m"; + private final ElasticsearchClient client; + private final ScrollResultES7.Factory scrollResultFactory; + private final SearchRequestFactory searchRequestFactory; + + @Inject + public Scroll(ElasticsearchClient client, + ScrollResultES7.Factory scrollResultFactory, + SearchRequestFactory searchRequestFactory) { + this.client = client; + this.scrollResultFactory = scrollResultFactory; + this.searchRequestFactory = searchRequestFactory; + } + + @Override + public ChunkedResult retrieveChunkedResult(ChunkCommand chunkCommand) { + final SearchSourceBuilder searchQuery = searchRequestFactory.create(chunkCommand); + final SearchRequest request = scrollBuilder(searchQuery, chunkCommand.indices()); + final SearchResponse result = client.singleSearch(request, "Unable to perform scroll search"); + return scrollResultFactory.create(result, searchQuery.toString(), DEFAULT_SCROLLTIME, chunkCommand.fields(), chunkCommand.limit().orElse(-1)); + } + + private SearchRequest scrollBuilder(SearchSourceBuilder query, Set indices) { + return new SearchRequest(indices.toArray(new String[0])) + .source(query) + .scroll(DEFAULT_SCROLLTIME) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ScrollResultES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ScrollResultES7.java new file mode 100644 index 000000000000..44f648046d2d --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ScrollResultES7.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.inject.assistedinject.Assisted; +import com.google.inject.assistedinject.AssistedInject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.ClearScrollRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchScrollRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.unit.TimeValue; +import org.graylog2.indexer.results.ResultMessageFactory; + +import javax.annotation.Nullable; +import java.io.IOException; +import java.util.List; + +public class ScrollResultES7 extends ChunkedQueryResultES7 { + + private static final TimeValue DEFAULT_SCROLL = TimeValue.timeValueMinutes(1); + + private final String scroll; + + public interface Factory { + ScrollResultES7 create(SearchResponse initialResult, @Assisted("query") String query, @Assisted("scroll") String scroll, List fields, int limit); + } + + @AssistedInject + public ScrollResultES7(ResultMessageFactory resultMessagseFactory, + ElasticsearchClient client, + @Assisted SearchResponse initialResult, + @Assisted("query") String query, + @Assisted("scroll") String scroll, + @Assisted List fields, + @Assisted int limit) { + super(resultMessagseFactory, client, initialResult, query, fields, limit); + this.scroll = scroll; + + } + + @Override + @Nullable + protected SearchResponse nextSearchResult() throws IOException { + if (this.lastSearchResponse.getScrollId() == null) { + //with ignore_unavailable=true and no available indices, response does not contain scrollId + return null; + } + final SearchScrollRequest scrollRequest = new SearchScrollRequest(this.lastSearchResponse.getScrollId()); + scrollRequest.scroll(TimeValue.parseTimeValue(this.scroll, DEFAULT_SCROLL, "scroll time")); + return client.executeWithIOException((c, requestOptions) -> c.scroll(scrollRequest, requestOptions), + "Unable to retrieve next chunk from search: "); + } + + @Override + public void cancel() throws IOException { + if (this.lastSearchResponse.getScrollId() == null) { + //with ignore_unavailable=true and no available indices, response does not contain scrollId, there is nothing to cancel + return; + } + final ClearScrollRequest request = new ClearScrollRequest(); + request.addScrollId(this.lastSearchResponse.getScrollId()); + + client.executeWithIOException((c, requestOptions) -> c.clearScroll(request, requestOptions), + "Unable to cancel scrolling search request"); + } + + @Override + protected String getChunkingMethodName() { + return "scroll"; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchCommand.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchCommand.java new file mode 100644 index 000000000000..9c1812ecfb9f --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchCommand.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. 
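Aside: the deprecated Scroll retriever and ScrollResultES7 follow the classic scroll protocol: open a scroll context with the first request, keep passing the returned scroll id, and clear the context when done. A condensed sketch with the unshaded client; index, keep-alive and page size are made-up values:

import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;

class ScrollSketch {
    static void scrollAll(RestHighLevelClient client) throws IOException {
        final SearchRequest initial = new SearchRequest("graylog_*")
                .source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(500))
                .scroll("1m"); // keep the scroll context alive for one minute per round trip
        SearchResponse response = client.search(initial, RequestOptions.DEFAULT);

        while (response.getScrollId() != null && response.getHits().getHits().length > 0) {
            // ... process response.getHits() ...
            final SearchScrollRequest next = new SearchScrollRequest(response.getScrollId())
                    .scroll(TimeValue.timeValueMinutes(1));
            response = client.scroll(next, RequestOptions.DEFAULT);
        }

        if (response.getScrollId() != null) {
            final ClearScrollRequest clear = new ClearScrollRequest();
            clear.addScrollId(response.getScrollId());
            client.clearScroll(clear, RequestOptions.DEFAULT);
        }
    }
}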
+ * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.google.auto.value.AutoValue; +import org.graylog.plugins.views.search.searchfilters.model.UsedSearchFilter; +import org.graylog2.indexer.searches.ChunkCommand; +import org.graylog2.indexer.searches.SearchesConfig; +import org.graylog2.indexer.searches.Sorting; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.OptionalLong; +import java.util.Set; + +@AutoValue +abstract class SearchCommand { + public abstract String query(); + public abstract Optional> streams(); + public abstract Optional sorting(); + public abstract Optional filter(); + public abstract List filters(); + public abstract Optional range(); + public abstract OptionalInt limit(); + public abstract OptionalInt offset(); + public abstract OptionalLong batchSize(); + public abstract boolean highlight(); + + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private static SearchCommand create( + String query, + Optional> streams, + Optional sorting, + Optional filter, + List filters, + Optional range, + OptionalInt limit, + OptionalInt offset, + OptionalLong batchSize, + boolean highlight) { + return new AutoValue_SearchCommand(query, streams, sorting, filter, filters, range, limit, offset, batchSize, highlight); + } + + static SearchCommand from(SearchesConfig searchesConfig) { + return create(searchesConfig.query(), Optional.empty(), Optional.ofNullable(searchesConfig.sorting()), + Optional.ofNullable(searchesConfig.filter()), Collections.emptyList(), Optional.of(searchesConfig.range()), + OptionalInt.of(searchesConfig.limit()), OptionalInt.of(searchesConfig.offset()), + OptionalLong.empty(), true); + } + + static SearchCommand from(ChunkCommand chunkCommand) { + return create(chunkCommand.query(), chunkCommand.streams(), chunkCommand.sorting(), + chunkCommand.filter(), chunkCommand.filters(), chunkCommand.range(), chunkCommand.limit(), chunkCommand.offset(), + chunkCommand.batchSize(), chunkCommand.highlight()); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchRequestFactory.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchRequestFactory.java new file mode 100644 index 000000000000..73ab80db2e45 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchRequestFactory.java @@ -0,0 +1,177 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.graylog.plugins.views.search.searchfilters.db.UsedSearchFiltersToQueryStringsMapper; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.slice.SliceBuilder; +import org.graylog2.indexer.searches.ChunkCommand; +import org.graylog2.indexer.searches.SearchesConfig; +import org.graylog2.indexer.searches.Sorting; +import org.graylog2.plugin.Message; +import org.graylog2.plugin.streams.Stream; + +import java.util.Optional; +import java.util.Set; + +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.termsQuery; + +public class SearchRequestFactory { + private static final Sorting DEFAULT_SORTING = new Sorting("_doc", Sorting.Direction.ASC); + private final SortOrderMapper sortOrderMapper; + private final boolean allowHighlighting; + private final boolean allowLeadingWildcardSearches; + private final UsedSearchFiltersToQueryStringsMapper searchFiltersMapper; + + @Inject + public SearchRequestFactory(SortOrderMapper sortOrderMapper, + @Named("allow_highlighting") boolean allowHighlighting, + @Named("allow_leading_wildcard_searches") boolean allowLeadingWildcardSearches, + UsedSearchFiltersToQueryStringsMapper searchFiltersMapper) { + this.sortOrderMapper = sortOrderMapper; + this.allowHighlighting = allowHighlighting; + this.allowLeadingWildcardSearches = allowLeadingWildcardSearches; + this.searchFiltersMapper = searchFiltersMapper; + } + + public SearchSourceBuilder create(SearchesConfig config) { + return create(SearchCommand.from(config)); + } + + public SearchSourceBuilder create(final ChunkCommand chunkCommand) { + final SearchSourceBuilder searchSourceBuilder = create(SearchCommand.from(chunkCommand)); + searchSourceBuilder.fetchSource(chunkCommand.fields().toArray(new String[0]), new String[0]); + chunkCommand.batchSize() + .ifPresent(batchSize -> searchSourceBuilder.size(Math.toIntExact(batchSize))); + chunkCommand.sliceParams() + .ifPresent(sliceParams -> searchSourceBuilder.slice(new SliceBuilder(sliceParams.id(), sliceParams.max()))); + return searchSourceBuilder; + } + + public SearchSourceBuilder create(SearchCommand searchCommand) { + final String query = normalizeQuery(searchCommand.query()); + + final QueryBuilder queryBuilder = isWildcardQuery(query) + ? 
matchAllQuery() + : queryStringQuery(query).allowLeadingWildcard(allowLeadingWildcardSearches); + + final Optional rangeQueryBuilder = searchCommand.range() + .map(TimeRangeQueryFactory::create) + .map(rangeQuery -> boolQuery().must(rangeQuery)); + final Optional filterQueryBuilder = searchCommand.filter() + .filter(filter -> !isWildcardQuery(filter)) + .map(QueryBuilders::queryStringQuery) + .map(queryStringQuery -> boolQuery().must(queryStringQuery)); + + final BoolQueryBuilder filteredQueryBuilder = boolQuery() + .must(queryBuilder); + filterQueryBuilder.ifPresent(filteredQueryBuilder::filter); + rangeQueryBuilder.ifPresent(filteredQueryBuilder::filter); + + applyStreamsFilter(filteredQueryBuilder, searchCommand); + + searchFiltersMapper.map(searchCommand.filters()) + .stream() + .map(this::translateQueryString) + .forEach(filteredQueryBuilder::filter); + + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(filteredQueryBuilder) + .trackTotalHits(true); + + applyPaginationIfPresent(searchSourceBuilder, searchCommand); + + applySortingIfPresent(searchSourceBuilder, searchCommand); + + applyHighlighting(searchSourceBuilder, searchCommand); + + return searchSourceBuilder; + } + + private QueryBuilder translateQueryString(String queryString) { + return (queryString.isEmpty() || queryString.trim().equals("*")) + ? QueryBuilders.matchAllQuery() + : QueryBuilders.queryStringQuery(queryString).allowLeadingWildcard(allowLeadingWildcardSearches); + } + + private void applyHighlighting(SearchSourceBuilder searchSourceBuilder, SearchCommand searchCommand) { + if (allowHighlighting && searchCommand.highlight()) { + final HighlightBuilder highlightBuilder = new HighlightBuilder() + .requireFieldMatch(false) + .field("*") + .fragmentSize(0) + .numOfFragments(0); + searchSourceBuilder.highlighter(highlightBuilder); + } + } + + private void applyPaginationIfPresent(SearchSourceBuilder searchSourceBuilder, SearchCommand command) { + command.offset().ifPresent(searchSourceBuilder::from); + command.limit().ifPresent(searchSourceBuilder::size); + } + + + private void applyStreamsFilter(BoolQueryBuilder filteredQueryBuilder, SearchCommand command) { + command.streams() + .map(this::buildStreamIdFilter) + .ifPresent(filteredQueryBuilder::filter); + } + + private BoolQueryBuilder buildStreamIdFilter(Set streams) { + final BoolQueryBuilder filterBuilder = boolQuery(); + + // If the included streams set contains the default stream, we also want all documents which do not + // have any stream assigned. Those documents have basically been in the "default stream" which didn't + // exist in Graylog <2.2.0. 
+ if (streams.contains(Stream.DEFAULT_STREAM_ID)) { + filterBuilder.should(boolQuery().mustNot(existsQuery(Message.FIELD_STREAMS))); + } + + // Only select messages which are assigned to the given streams + filterBuilder.should(termsQuery(Message.FIELD_STREAMS, streams)); + + return filterBuilder; + } + + private void applySortingIfPresent(SearchSourceBuilder searchSourceBuilder, SearchCommand command) { + final Sorting sort = command.sorting().orElse(DEFAULT_SORTING); + searchSourceBuilder.sort(sort.getField(), sortOrderMapper.fromSorting(sort)); + } + + + private boolean isWildcardQuery(String filter) { + return normalizeQuery(filter).equals("*"); + } + + private String normalizeQuery(String query) { + if (query == null || query.trim().isEmpty()) { + return "*"; + } + return query.trim(); + } + +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchesAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchesAdapterES7.java new file mode 100644 index 000000000000..687aa27a1635 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SearchesAdapterES7.java @@ -0,0 +1,232 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
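Aside: to make the query assembly in SearchRequestFactory concrete, a typical message search ends up as a bool query whose must clause is the user's query string, plus filters for the time range and the selected streams (with the extra mustNot-exists branch for the default stream). A hand-built equivalent with invented field values:

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.util.Set;

class BuiltQuerySketch {
    static SearchSourceBuilder example() {
        final String defaultStreamId = "000000000000000000000001"; // Graylog's default stream id
        final Set<String> streams = Set.of(defaultStreamId, "abc123abc123abc123abc123"); // second id invented

        // Messages either carry one of the selected streams, or carry no streams field at all
        // (pre-2.2 messages implicitly belonged to the default stream).
        final BoolQueryBuilder streamFilter = QueryBuilders.boolQuery()
                .should(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("streams")))
                .should(QueryBuilders.termsQuery("streams", streams));

        final BoolQueryBuilder query = QueryBuilders.boolQuery()
                .must(QueryBuilders.queryStringQuery("source:example*").allowLeadingWildcard(false))
                .filter(QueryBuilders.rangeQuery("timestamp")
                        .gte("2024-05-01 00:00:00.000")
                        .lt("2024-05-02 00:00:00.000"))
                .filter(streamFilter);

        return new SearchSourceBuilder()
                .query(query)
                .trackTotalHits(true)
                .from(0)
                .size(50);
    }
}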
+ */ +package org.graylog.storage.elasticsearch7; + +import com.google.common.collect.Streams; +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.Cardinality; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.ValueCount; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog2.indexer.ranges.IndexRange; +import org.graylog2.indexer.results.ChunkedResult; +import org.graylog2.indexer.results.CountResult; +import org.graylog2.indexer.results.FieldStatsResult; +import org.graylog2.indexer.results.ResultMessage; +import org.graylog2.indexer.results.ResultMessageFactory; +import org.graylog2.indexer.results.SearchResult; +import org.graylog2.indexer.searches.ChunkCommand; +import org.graylog2.indexer.searches.SearchesAdapter; +import org.graylog2.indexer.searches.SearchesConfig; +import org.graylog2.indexer.searches.Sorting; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; + +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static com.google.common.base.MoreObjects.firstNonNull; + +public class SearchesAdapterES7 implements SearchesAdapter { + private static final String AGG_CARDINALITY = "gl2_field_cardinality"; + private static final String AGG_EXTENDED_STATS = "gl2_extended_stats"; + private static final String AGG_VALUE_COUNT = "gl2_value_count"; + + private final ResultMessageFactory resultMessageFactory; + private final ElasticsearchClient client; + private final Scroll scroll; + private final SearchRequestFactory searchRequestFactory; + + @Inject + public SearchesAdapterES7(ResultMessageFactory resultMessageFactory, + ElasticsearchClient client, + Scroll scroll, + SearchRequestFactory searchRequestFactory) { + this.resultMessageFactory = resultMessageFactory; + this.client = client; + this.scroll = scroll; + this.searchRequestFactory = searchRequestFactory; + } + + @Override + public CountResult count(Set affectedIndices, String query, TimeRange range, String filter) { + final SearchesConfig config = SearchesConfig.builder() + .query(query) + .range(range) + .filter(filter) + .limit(0) + .offset(0) + .build(); + final SearchSourceBuilder searchSourceBuilder = searchRequestFactory.create(config); + final SearchRequest searchRequest = new SearchRequest(affectedIndices.toArray(new String[0])) + .source(searchSourceBuilder); + + final SearchResponse result = client.search(searchRequest, "Fetching message count failed for indices "); + + return CountResult.create(result.getHits().getTotalHits().value, result.getTook().getMillis()); + } + + @Override + public ChunkedResult scroll(Set indexWildcards, Sorting sorting, String filter, String query, TimeRange range, int limit, int offset, List fields) { + return scroll(ChunkCommand.builder() + .indices(indexWildcards) + .sorting(sorting) + .filter(filter) + .query(query) + .range(range) + .limit(limit) + .offset(offset) + .fields(fields) + .build()); + } + + @Override + public ChunkedResult scroll(Set indexWildcards, Sorting sorting, String filter, 
String query, int batchSize) { + return scroll(ChunkCommand.builder() + .indices(indexWildcards) + .sorting(sorting) + .filter(filter) + .query(query) + .batchSize(batchSize) + .build()); + } + + @Override + public ChunkedResult scroll(ChunkCommand chunkCommand) { + return scroll.retrieveChunkedResult(chunkCommand); + } + + @Override + public SearchResult search(Set indices, Set indexRanges, SearchesConfig config) { + final SearchSourceBuilder searchSourceBuilder = searchRequestFactory.create(config); + + if (indexRanges.isEmpty()) { + return SearchResult.empty(config.query(), searchSourceBuilder.toString()); + } + + final SearchRequest searchRequest = new SearchRequest(indices.toArray(new String[0])) + .source(searchSourceBuilder); + final SearchResponse searchResult = client.search(searchRequest, "Unable to perform search query"); + + final List resultMessages = extractResultMessages(searchResult); + final long totalResults = searchResult.getHits().getTotalHits().value; + final long tookMs = searchResult.getTook().getMillis(); + final String builtQuery = searchSourceBuilder.toString(); + + return new SearchResult(resultMessages, totalResults, indexRanges, config.query(), builtQuery, tookMs); + } + + private List extractResultMessages(SearchResponse searchResult) { + return Streams.stream(searchResult.getHits()) + .map(hit -> resultMessageFactory.parseFromSource(hit.getId(), hit.getIndex(), hit.getSourceAsMap())) + .collect(Collectors.toList()); + } + + @Override + public FieldStatsResult fieldStats(String query, String filter, TimeRange range, Set indices, String field, boolean includeCardinality, boolean includeStats, boolean includeCount) { + final SearchesConfig config = SearchesConfig.builder() + .query(query) + .filter(filter) + .range(range) + .offset(0) + .limit(-1) + .build(); + final SearchSourceBuilder searchSourceBuilder = searchRequestFactory.create(config); + + if (includeCount) { + searchSourceBuilder.aggregation(AggregationBuilders.count(AGG_VALUE_COUNT).field(field)); + } + if (includeStats) { + searchSourceBuilder.aggregation(AggregationBuilders.extendedStats(AGG_EXTENDED_STATS).field(field)); + } + if (includeCardinality) { + searchSourceBuilder.aggregation(AggregationBuilders.cardinality(AGG_CARDINALITY).field(field)); + } + + if (indices.isEmpty()) { + return FieldStatsResult.empty(query, searchSourceBuilder.toString()); + } + + final SearchRequest searchRequest = new SearchRequest(indices.toArray(new String[0])) + .source(searchSourceBuilder); + + final SearchResponse searchResult = client.search(searchRequest, "Unable to retrieve fields stats"); + + final List resultMessages = extractResultMessages(searchResult); + final long tookMs = searchResult.getTook().getMillis(); + + final ExtendedStats extendedStatsAggregation = searchResult.getAggregations().get(AGG_EXTENDED_STATS); + final ValueCount valueCountAggregation = searchResult.getAggregations().get(AGG_VALUE_COUNT); + final Cardinality cardinalityAggregation = searchResult.getAggregations().get(AGG_CARDINALITY); + + return createFieldStatsResult(extendedStatsAggregation, + valueCountAggregation, + cardinalityAggregation, + resultMessages, + query, + searchSourceBuilder.toString(), + tookMs); + } + + private FieldStatsResult createFieldStatsResult(ExtendedStats extendedStatsAggregation, + ValueCount valueCountAggregation, + Cardinality cardinalityAggregation, + List resultMessages, + String query, + String builtQuery, + long tookMs) { + final long cardinality = cardinalityAggregation == null ? 
Long.MIN_VALUE : cardinalityAggregation.getValue(); + final long count = valueCountAggregation == null ? Long.MIN_VALUE : valueCountAggregation.getValue(); + + double sum = Double.NaN; + double sumOfSquares = Double.NaN; + double mean = Double.NaN; + double min = Double.NaN; + double max = Double.NaN; + double variance = Double.NaN; + double stdDeviation = Double.NaN; + + if (extendedStatsAggregation != null) { + sum = firstNonNull(extendedStatsAggregation.getSum(), Double.NaN); + sumOfSquares = firstNonNull(extendedStatsAggregation.getSumOfSquares(), Double.NaN); + mean = firstNonNull(extendedStatsAggregation.getAvg(), Double.NaN); + min = firstNonNull(extendedStatsAggregation.getMin(), Double.NaN); + max = firstNonNull(extendedStatsAggregation.getMax(), Double.NaN); + variance = firstNonNull(extendedStatsAggregation.getVariance(), Double.NaN); + stdDeviation = firstNonNull(extendedStatsAggregation.getStdDeviation(), Double.NaN); + } + + return FieldStatsResult.create( + count, + sum, + sumOfSquares, + mean, + min, + max, + variance, + stdDeviation, + cardinality, + resultMessages, + query, + builtQuery, + tookMs + ); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SnifferWrapper.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SnifferWrapper.java new file mode 100644 index 000000000000..189494bcc5bc --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SnifferWrapper.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
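Aside: the fieldStats path in SearchesAdapterES7 simply attaches up to three metric aggregations to the search and reads them back by name. The same round trip against the plain client looks roughly like this (index and field names invented; the gl2_* aggregation names mirror the constants above):

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.Cardinality;
import org.elasticsearch.search.aggregations.metrics.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.ValueCount;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;

class FieldStatsSketch {
    static void fieldStats(RestHighLevelClient client) throws IOException {
        final SearchSourceBuilder source = new SearchSourceBuilder()
                .size(0) // only the aggregation results matter here
                .aggregation(AggregationBuilders.count("gl2_value_count").field("took_ms"))
                .aggregation(AggregationBuilders.extendedStats("gl2_extended_stats").field("took_ms"))
                .aggregation(AggregationBuilders.cardinality("gl2_field_cardinality").field("took_ms"));

        final SearchResponse response = client.search(
                new SearchRequest("graylog_*").source(source), RequestOptions.DEFAULT);

        final ValueCount count = response.getAggregations().get("gl2_value_count");
        final ExtendedStats stats = response.getAggregations().get("gl2_extended_stats");
        final Cardinality cardinality = response.getAggregations().get("gl2_field_cardinality");
        System.out.printf("count=%d mean=%.2f cardinality=%d%n",
                count.getValue(), stats.getAvg(), cardinality.getValue());
    }
}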
+ */ +package org.graylog.storage.elasticsearch7; + +import com.github.joschi.jadconfig.util.Duration; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Node; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestClient; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.ElasticsearchNodesSniffer; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.Sniffer; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CopyOnWriteArrayList; + +public class SnifferWrapper implements org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.NodesSniffer { + private final List sniffers = new CopyOnWriteArrayList(); + private final RestClient restClient; + private final long sniffRequestTimeoutMillis; + private final Duration discoveryFrequency; + private final ElasticsearchNodesSniffer.Scheme scheme; + private org.graylog.shaded.elasticsearch7.org.elasticsearch.client.sniff.NodesSniffer nodesSniffer; + + private SnifferWrapper(RestClient restClient, long sniffRequestTimeoutMillis, Duration discoveryFrequency, ElasticsearchNodesSniffer.Scheme scheme) { + this.restClient = restClient; + this.sniffRequestTimeoutMillis = sniffRequestTimeoutMillis; + this.discoveryFrequency = discoveryFrequency; + this.scheme = scheme; + } + + @Override + public List sniff() throws IOException { + List nodes = this.nodesSniffer.sniff(); + for (NodesSniffer sniffer : sniffers) { + nodes = sniffer.sniff(nodes); + } + return nodes; + } + + public static SnifferWrapper create(RestClient restClient, long sniffRequestTimeoutMillis, Duration discoveryFrequency, ElasticsearchNodesSniffer.Scheme scheme) { + return new SnifferWrapper(restClient, sniffRequestTimeoutMillis, discoveryFrequency, scheme); + } + + public Optional build() { + if(sniffers.isEmpty()) { + return Optional.empty(); + } + + this.nodesSniffer = new ElasticsearchNodesSniffer(restClient, sniffRequestTimeoutMillis, scheme); + return Optional.of(Sniffer.builder(restClient) + .setSniffIntervalMillis(Math.toIntExact(discoveryFrequency.toMilliseconds())) + .setNodesSniffer(this) + .build()); + } + + public void add(NodesSniffer sniffer) { + this.sniffers.add(sniffer); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SortOrderMapper.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SortOrderMapper.java new file mode 100644 index 000000000000..841780893d83 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/SortOrderMapper.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7; + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortOrder; +import org.graylog2.indexer.searches.Sorting; + +import java.util.Locale; + +public class SortOrderMapper { + public SortOrder fromSorting(Sorting sorting) { + return SortOrder.valueOf(sorting.getDirection().toString().toUpperCase(Locale.ENGLISH)); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ThrowingBiFunction.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ThrowingBiFunction.java new file mode 100644 index 000000000000..970072df95d6 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ThrowingBiFunction.java @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +public interface ThrowingBiFunction { + R apply(A1 a1, A2 a2) throws E; +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/TimeRangeQueryFactory.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/TimeRangeQueryFactory.java new file mode 100644 index 000000000000..c8d5d4b72598 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/TimeRangeQueryFactory.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
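Aside: ThrowingBiFunction appears above without its type parameters; judging by the apply signature it is presumably generic over the two arguments, the result, and the thrown exception type, matching the (client, requestOptions) -> ... lambdas used with ElasticsearchClient elsewhere in this module. A hedged sketch of that shape (interface and constant names invented):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.MainResponse;

import java.io.IOException;

// Presumed shape of the interface; the type parameter names are guesses.
interface ThrowingBiFunctionSketch<A1, A2, R, E extends Exception> {
    R apply(A1 a1, A2 a2) throws E;
}

class ThrowingBiFunctionUsage {
    // A lambda of the same shape as those handed to the client wrapper:
    // two inputs, a result, and a checked exception.
    static final ThrowingBiFunctionSketch<RestHighLevelClient, RequestOptions, MainResponse, IOException> CLUSTER_INFO =
            (client, options) -> client.info(options);
}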
+ */ +package org.graylog.storage.elasticsearch7; + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.RangeQueryBuilder; +import org.graylog2.plugin.Message; +import org.graylog2.plugin.Tools; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; + +import javax.annotation.Nullable; + +public class TimeRangeQueryFactory { + @Nullable + public static RangeQueryBuilder create(TimeRange range) { + if (range == null) { + return null; + } + + return QueryBuilders.rangeQuery(Message.FIELD_TIMESTAMP) + .gte(Tools.buildElasticSearchTimeFormat(range.getFrom())) + .lt(Tools.buildElasticSearchTimeFormat(range.getTo())); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/UnsupportedRemoteReindexingMigrationAdapterES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/UnsupportedRemoteReindexingMigrationAdapterES7.java new file mode 100644 index 000000000000..839e1aa93e9a --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/UnsupportedRemoteReindexingMigrationAdapterES7.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; +import org.graylog2.indexer.IndexSet; +import org.graylog2.indexer.datanode.RemoteReindexRequest; +import org.graylog2.indexer.datanode.RemoteReindexingMigrationAdapter; +import org.graylog2.indexer.migration.IndexerConnectionCheckResult; +import org.graylog2.indexer.migration.RemoteReindexMigration; + +import java.net.URI; +import java.util.Optional; + +public class UnsupportedRemoteReindexingMigrationAdapterES7 implements RemoteReindexingMigrationAdapter { + + public static final String UNSUPPORTED_MESSAGE = "This operation should never be called. We remote-reindex into the DataNode that contains OpenSearch. This adapter only exists for API completeness"; + + @Override + public boolean isMigrationRunning(IndexSet indexSet) { + return false; // we'll never run a remote reindex migration against elasticsearch target. It's always OS in datanode. 
+ } + + @Override + public String start(RemoteReindexRequest request) { + throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); + } + + @Override + public RemoteReindexMigration status(@Nonnull String migrationID) { + throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); + } + + @Override + public IndexerConnectionCheckResult checkConnection(@Nonnull URI uri, @Nullable String username, @Nullable String password, @Nullable String allowlist, boolean trustUnknownCerts) { + throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); + } + + @Override + public Optional getLatestMigrationId() { + return Optional.empty(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ViewsESBackendModule.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ViewsESBackendModule.java new file mode 100644 index 000000000000..16ff9937e07f --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/ViewsESBackendModule.java @@ -0,0 +1,159 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7; + +import com.google.inject.Scopes; +import com.google.inject.TypeLiteral; +import com.google.inject.assistedinject.FactoryModuleBuilder; +import com.google.inject.binder.LinkedBindingBuilder; +import com.google.inject.binder.ScopedBindingBuilder; +import com.google.inject.multibindings.MapBinder; +import org.graylog.plugins.views.ViewsModule; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.export.ExportBackend; +import org.graylog.plugins.views.search.searchtypes.MessageList; +import org.graylog.plugins.views.search.searchtypes.events.EventList; +import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.DateRangeBucket; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Time; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Values; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Average; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Cardinality; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Count; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Latest; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Max; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Min; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Percentage; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Percentile; +import org.graylog.plugins.views.search.searchtypes.pivot.series.StdDev; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Sum; +import 
org.graylog.plugins.views.search.searchtypes.pivot.series.SumOfSquares; +import org.graylog.plugins.views.search.searchtypes.pivot.series.Variance; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregation; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; +import org.graylog.storage.elasticsearch7.views.ElasticsearchBackend; +import org.graylog.storage.elasticsearch7.views.export.ElasticsearchExportBackend; +import org.graylog.storage.elasticsearch7.views.export.RequestStrategy; +import org.graylog.storage.elasticsearch7.views.searchtypes.ESEventList; +import org.graylog.storage.elasticsearch7.views.searchtypes.ESMessageList; +import org.graylog.storage.elasticsearch7.views.searchtypes.ESSearchTypeHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.ESPivot; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.ESPivotBucketSpecHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.ESPivotSeriesSpecHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.buckets.ESDateRangeHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.buckets.ESTimeHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.buckets.ESValuesHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESAverageHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESCardinalityHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESCountHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESLatestHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESMaxHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESMinHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESPercentageHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESPercentilesHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESStdDevHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESSumHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESSumOfSquaresHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.series.ESVarianceHandler; +import org.graylog2.storage.SearchVersion; + +public class ViewsESBackendModule extends ViewsModule { + private final SearchVersion supportedSearchVersion; + + public ViewsESBackendModule(SearchVersion supportedSearchVersion) { + this.supportedSearchVersion = supportedSearchVersion; + } + + @Override + protected void configure() { + install(new FactoryModuleBuilder().build(ESGeneratedQueryContext.Factory.class)); + + registerVersionedQueryBackend(supportedSearchVersion, ElasticsearchBackend.class); + + registerESSearchTypeHandler(MessageList.NAME, ESMessageList.class); + registerESSearchTypeHandler(EventList.NAME, ESEventList.class); + registerESSearchTypeHandler(Pivot.NAME, ESPivot.class).in(Scopes.SINGLETON); + + registerPivotSeriesHandler(Average.NAME, ESAverageHandler.class); + registerPivotSeriesHandler(Cardinality.NAME, ESCardinalityHandler.class); + registerPivotSeriesHandler(Count.NAME, ESCountHandler.class); + registerPivotSeriesHandler(Max.NAME, ESMaxHandler.class); + registerPivotSeriesHandler(Min.NAME, ESMinHandler.class); + registerPivotSeriesHandler(StdDev.NAME, ESStdDevHandler.class); + 
registerPivotSeriesHandler(Sum.NAME, ESSumHandler.class); + registerPivotSeriesHandler(SumOfSquares.NAME, ESSumOfSquaresHandler.class); + registerPivotSeriesHandler(Variance.NAME, ESVarianceHandler.class); + registerPivotSeriesHandler(Percentage.NAME, ESPercentageHandler.class); + registerPivotSeriesHandler(Percentile.NAME, ESPercentilesHandler.class); + registerPivotSeriesHandler(Latest.NAME, ESLatestHandler.class); + + registerPivotBucketHandler(Values.NAME, ESValuesHandler.class); + registerPivotBucketHandler(Time.NAME, ESTimeHandler.class); + registerPivotBucketHandler(DateRangeBucket.NAME, ESDateRangeHandler.class); + + bindExportBackend().to(ElasticsearchExportBackend.class); + bindRequestStrategy().to(org.graylog.storage.elasticsearch7.views.export.SearchAfter.class); + } + + private LinkedBindingBuilder bindRequestStrategy() { + return bind(RequestStrategy.class); + } + + private LinkedBindingBuilder bindExportBackend() { + return bindExportBackend(supportedSearchVersion); + } + + private MapBinder> pivotBucketHandlerBinder() { + return MapBinder.newMapBinder(binder(), + TypeLiteral.get(String.class), + new TypeLiteral<>() {}); + + } + + private void registerPivotBucketHandler( + String name, + Class> implementation + ) { + pivotBucketHandlerBinder().addBinding(name).to(implementation); + } + + protected MapBinder> pivotSeriesHandlerBinder() { + return MapBinder.newMapBinder(binder(), + TypeLiteral.get(String.class), + new TypeLiteral<>() {}); + + } + + private void registerPivotSeriesHandler( + String name, + Class> implementation + ) { + pivotSeriesHandlerBinder().addBinding(name).to(implementation); + } + + private MapBinder> esSearchTypeHandlerBinder() { + return MapBinder.newMapBinder(binder(), + TypeLiteral.get(String.class), + new TypeLiteral>() {}); + } + + private ScopedBindingBuilder registerESSearchTypeHandler(String name, Class> implementation) { + return esSearchTypeHandlerBinder().addBinding(name).to(implementation); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/blocks/BlockSettingsParser.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/blocks/BlockSettingsParser.java new file mode 100644 index 000000000000..f073d6121e46 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/blocks/BlockSettingsParser.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */
+package org.graylog.storage.elasticsearch7.blocks;
+
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings;
+import org.graylog2.indexer.indices.blocks.IndicesBlockStatus;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class BlockSettingsParser {
+
+    static final String BLOCK_SETTINGS_PREFIX = "index.blocks.";
+
+    public static IndicesBlockStatus parseBlockSettings(final GetSettingsResponse settingsResponse) {
+        return parseBlockSettings(settingsResponse, Optional.empty());
+    }
+
+    public static IndicesBlockStatus parseBlockSettings(final GetSettingsResponse settingsResponse, final Optional<List<String>> indices) {
+        final IndicesBlockStatus result = new IndicesBlockStatus();
+        final ImmutableOpenMap<String, Settings> indexToSettingsMap = settingsResponse.getIndexToSettings();
+        final String[] indicesInResponse = indexToSettingsMap.keys().toArray(String.class);
+
+        indices.orElse(Arrays.stream(indicesInResponse).toList()).forEach(index -> {
+            final var settings = indexToSettingsMap.get(index);
+            if (settings != null) {
+                final Settings blockSettings = settings.getByPrefix(BLOCK_SETTINGS_PREFIX);
+
+                if (!blockSettings.isEmpty()) {
+                    final Set<String> blockSettingsNames = blockSettings.names();
+                    final Set<String> blockSettingsSetToTrue = blockSettingsNames.stream()
+                            .filter(s -> blockSettings.getAsBoolean(s, false))
+                            .map(s -> BLOCK_SETTINGS_PREFIX + s)
+                            .collect(Collectors.toSet());
+                    if (!blockSettingsSetToTrue.isEmpty()) {
+                        result.addIndexBlocks(index, blockSettingsSetToTrue);
+                    }
+                }
+            }
+        });
+
+        return result;
+    }
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/AliasSummaryResponse.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/AliasSummaryResponse.java
new file mode 100644
index 000000000000..f26164da6f19
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/AliasSummaryResponse.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */
+package org.graylog.storage.elasticsearch7.cat;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public record AliasSummaryResponse(@JsonProperty("alias") String alias,
+                                   @JsonProperty("index") String index) {
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/CatApi.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/CatApi.java
new file mode 100644
index 000000000000..98789d050854
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/CatApi.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.cat; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Streams; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Response; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; + +import jakarta.inject.Inject; +import org.graylog2.indexer.indices.ShardsInfo; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +public class CatApi { + private final ObjectMapper objectMapper; + private final ElasticsearchClient client; + + @Inject + public CatApi(ObjectMapper objectMapper, + ElasticsearchClient client) { + this.objectMapper = objectMapper; + this.client = client; + } + + public Map aliases() { + final Request request = request("GET", "aliases"); + request.addParameter("h", "alias,index"); + final List response = perform(request, new TypeReference>() {}, "Unable to retrieve aliases"); + + return response.stream() + .collect(Collectors.toMap(AliasSummaryResponse::alias, AliasSummaryResponse::index)); + } + + public List nodes() { + final Request request = request("GET", "nodes"); + request.addParameter("h", "id,name,role,host,ip,fileDescriptorMax,diskUsed,diskTotal,diskUsedPercent"); + request.addParameter("full_id", "true"); + return perform(request, new TypeReference<>() {}, "Unable to retrieve nodes list"); + } + + public List indices() { + final Request request = request("GET", "indices"); + request.addParameter("h", "index,status,health"); + return perform(request, new TypeReference<>() {}, "Unable to retrieve indices list"); + } + + public Set indices(String index, Collection status, String errorMessage) { + return indices(Collections.singleton(index), status, errorMessage); + } + + public Set indices(Collection indices, Collection status, String errorMessage) { + final String joinedIndices = String.join(",", indices); + final JsonNode jsonResponse = requestIndices(joinedIndices, errorMessage); + + //noinspection UnstableApiUsage + return Streams.stream(jsonResponse.elements()) + .filter(index -> status.isEmpty() || status.contains(index.path("status").asText())) + .map(index -> index.path("index").asText()) + .collect(Collectors.toSet()); + } + + public Optional indexState(String indexName, String errorMessage) { + final JsonNode jsonResponse = requestIndices(indexName, errorMessage); + + //noinspection UnstableApiUsage + return Streams.stream(jsonResponse.elements()) + .filter(index -> index.path("index").asText().equals(indexName)) + .map(index -> index.path("status").asText()) + .findFirst(); + } + + public List getShardsInfo(String indexName) { + return 
requestShardsInfo(indexName).stream().map(ShardsInfo::create).toList(); + } + + private List requestShardsInfo(String indexName) { + final Request request = request("GET", "shards/" + indexName); + return perform(request, new TypeReference>() {}, "Unable to retrieve index shards"); + } + + private JsonNode requestIndices(String indexName, String errorMessage) { + final Request request = request("GET", "indices/" + indexName); + request.addParameter("h", "index,status"); + request.addParameter("expand_wildcards", "all"); + request.addParameter("s", "index,status"); + + return perform(request, new TypeReference() {}, errorMessage); + } + + private R perform(Request request, TypeReference responseClass, String errorMessage) { + return client.execute((c, requestOptions) -> { + request.setOptions(requestOptions); + + final Response response = c.getLowLevelClient().performRequest(request); + return returnType(response, responseClass); + }, errorMessage); + } + + private R returnType(Response response, TypeReference responseClass) throws IOException { + return objectMapper.readValue(response.getEntity().getContent(), responseClass); + } + + private Request request(@SuppressWarnings("SameParameterValue") String method, String endpoint) { + final Request request = new Request(method, "/_cat/" + endpoint); + request.addParameter("format", "json"); + + return request; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/IndexSummaryResponse.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/IndexSummaryResponse.java new file mode 100644 index 000000000000..4abe2224339a --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/IndexSummaryResponse.java @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.cat; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public record IndexSummaryResponse(@JsonProperty("index") String index, + @JsonProperty("status") String status, + @JsonProperty("health") String health) { +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/NodeResponse.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/NodeResponse.java new file mode 100644 index 000000000000..0605bf9c62dc --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cat/NodeResponse.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.cat; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.auto.value.AutoValue; + +import javax.annotation.Nullable; + +@AutoValue +@JsonAutoDetect +public abstract class NodeResponse { + public abstract String id(); + + public abstract String name(); + + public abstract String role(); + + @Nullable + public abstract String host(); + + public abstract String ip(); + + @Nullable + public abstract String diskUsed(); + + @Nullable + public abstract String diskTotal(); + + @Nullable + public abstract Double diskUsedPercent(); + + @Nullable + public abstract Long fileDescriptorMax(); + + @JsonCreator + public static NodeResponse create(@JsonProperty("id") String id, + @JsonProperty("name") String name, + @JsonProperty("role") String role, + @JsonProperty("host") @Nullable String host, + @JsonProperty("ip") String ip, + @JsonProperty("diskUsed") @Nullable String diskUsed, + @JsonProperty("diskTotal") @Nullable String diskTotal, + @JsonProperty("diskUsedPercent") @Nullable Double diskUsedPercent, + @JsonProperty("fileDescriptorMax") @Nullable Long fileDescriptorMax) { + return new AutoValue_NodeResponse( + id, + name, + role, + host, + ip, + diskUsed, + diskTotal, + diskUsedPercent, + fileDescriptorMax + ); + } + + @JsonIgnore + public boolean hasDiskStatistics() { + return diskUsed() != null && + diskTotal() != null && + diskUsedPercent() != null && + fileDescriptorMax() != null; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/client/ESCredentialsProvider.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/client/ESCredentialsProvider.java new file mode 100644 index 000000000000..ed8889aa2b8f --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/client/ESCredentialsProvider.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.client; + +import com.google.common.base.Splitter; +import com.google.common.base.Strings; +import org.graylog.shaded.elasticsearch7.org.apache.http.auth.AuthScope; +import org.graylog.shaded.elasticsearch7.org.apache.http.auth.UsernamePasswordCredentials; +import org.graylog.shaded.elasticsearch7.org.apache.http.client.CredentialsProvider; +import org.graylog.shaded.elasticsearch7.org.apache.http.impl.client.BasicCredentialsProvider; +import org.graylog2.configuration.IndexerHosts; + +import javax.annotation.Nullable; + +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.inject.Provider; + +import java.net.URI; +import java.util.Iterator; +import java.util.List; + +public class ESCredentialsProvider implements Provider { + private final List elasticsearchHosts; + private final String defaultUserForDiscoveredNodes; + private final String defaultPasswordForDiscoveredNodes; + private final boolean discoveryEnabled; + + @Inject + public ESCredentialsProvider(@IndexerHosts List elasticsearchHosts, + @Named("elasticsearch_discovery_default_user") @Nullable String defaultUserForDiscoveredNodes, + @Named("elasticsearch_discovery_default_password") @Nullable String defaultPasswordForDiscoveredNodes, + @Named("elasticsearch_discovery_enabled") boolean discoveryEnabled) { + this.elasticsearchHosts = elasticsearchHosts; + this.defaultUserForDiscoveredNodes = defaultUserForDiscoveredNodes; + this.defaultPasswordForDiscoveredNodes = defaultPasswordForDiscoveredNodes; + this.discoveryEnabled = discoveryEnabled; + } + + @Override + public CredentialsProvider get() { + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + elasticsearchHosts + .forEach(hostUri -> { + if (!Strings.isNullOrEmpty(hostUri.getUserInfo())) { + final Iterator splittedUserInfo = Splitter.on(":") + .split(hostUri.getUserInfo()) + .iterator(); + if (splittedUserInfo.hasNext()) { + final String username = splittedUserInfo.next(); + final String password = splittedUserInfo.hasNext() ? splittedUserInfo.next() : null; + credentialsProvider.setCredentials( + new AuthScope(hostUri.getHost(), hostUri.getPort(), AuthScope.ANY_REALM, AuthScope.ANY_SCHEME), + new UsernamePasswordCredentials(username, password) + ); + } + } + }); + + if (discoveryEnabled && !Strings.isNullOrEmpty(defaultUserForDiscoveredNodes) && !Strings.isNullOrEmpty(defaultPasswordForDiscoveredNodes)) { + credentialsProvider.setCredentials( + new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthScope.ANY_SCHEME), + new UsernamePasswordCredentials(defaultUserForDiscoveredNodes, defaultPasswordForDiscoveredNodes) + ); + } + + return credentialsProvider; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cluster/ClusterStateApi.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cluster/ClusterStateApi.java new file mode 100644 index 000000000000..919b2d3490ba --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/cluster/ClusterStateApi.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.cluster; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Streams; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Response; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; + +import jakarta.inject.Inject; + +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.mapping; + +public class ClusterStateApi { + private final ObjectMapper objectMapper; + private final ElasticsearchClient client; + + @Inject + public ClusterStateApi(ObjectMapper objectMapper, + ElasticsearchClient client) { + this.objectMapper = objectMapper; + this.client = client; + } + + public Map> fields(Collection indices) { + final Request request = request(indices); + + final JsonNode jsonResponse = client.execute((c, requestOptions) -> { + request.setOptions(requestOptions); + final Response response = c.getLowLevelClient().performRequest(request); + return objectMapper.readTree(response.getEntity().getContent()); + }, "Unable to retrieve fields from indices: " + String.join(",", indices)); + + //noinspection UnstableApiUsage + return Streams.stream(jsonResponse.path("metadata").path("indices").fields()) + .flatMap(index -> allFieldsFromIndex(index.getKey(), index.getValue())) + .collect(groupingBy(Map.Entry::getKey, mapping(Map.Entry::getValue, Collectors.toSet()))); + } + + private Stream> allFieldsFromIndex(String indexName, JsonNode indexMapping) { + //noinspection UnstableApiUsage + return Streams.stream(indexMapping.path("mappings").fields()) + .flatMap(documentType -> allFieldsFromDocumentType(indexName, documentType.getValue())); + } + + private Stream> allFieldsFromDocumentType(String indexName, JsonNode documentType) { + //noinspection UnstableApiUsage + return Streams.stream(documentType.path("properties").fields()) + .map(field -> new AbstractMap.SimpleEntry<>(indexName, field.getKey())); + } + + private Request request(Collection indices) { + final StringBuilder apiEndpoint = new StringBuilder("/_cluster/state/metadata"); + if (!indices.isEmpty()) { + final String joinedIndices = String.join(",", indices); + apiEndpoint.append("/"); + apiEndpoint.append(joinedIndices); + } + + return new Request("GET", apiEndpoint.toString()); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/fieldtypes/streams/StreamsForFieldRetrieverES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/fieldtypes/streams/StreamsForFieldRetrieverES7.java new file mode 100644 index 000000000000..7ae1970ce434 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/fieldtypes/streams/StreamsForFieldRetrieverES7.java @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.fieldtypes.streams; + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.MultiSearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.ExistsQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregations; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; +import org.graylog2.indexer.fieldtypes.streamfiltered.esadapters.StreamsForFieldRetriever; +import org.graylog2.plugin.Message; + +import jakarta.inject.Inject; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +public class StreamsForFieldRetrieverES7 implements StreamsForFieldRetriever { + + private static final int SEARCH_MAX_BUCKETS_ES = 10_000; + + private final ElasticsearchClient client; + + @Inject + public StreamsForFieldRetrieverES7(final ElasticsearchClient client) { + this.client = client; + } + + @Override + public Map> getStreams(final List fieldNames, final String indexName) { + final List multiSearchResponse = client.msearch(fieldNames.stream() + .map(fieldName -> createSearchRequest(fieldName, indexName)) + .collect(Collectors.toList()), + "Unable to retrieve fields types aggregations"); + + + final List> streamsPerField = multiSearchResponse.stream() + .map(item -> retrieveStreamsFromAggregationInResponse(item.getResponse())) + .toList(); + + Map> result = new HashMap<>(fieldNames.size()); + for (int i = 0; i < fieldNames.size(); i++) { + result.put(fieldNames.get(i), streamsPerField.get(i)); + } + + return result; + + } + + @Override + public Set getStreams(final String fieldName, final String indexName) { + final SearchRequest searchRequest = createSearchRequest(fieldName, indexName); + + final SearchResponse searchResult = client.search(searchRequest, "Unable to retrieve fields types aggregations"); + + return retrieveStreamsFromAggregationInResponse(searchResult); + } + + private Set retrieveStreamsFromAggregationInResponse(final SearchResponse searchResult) { + final Aggregations aggregations = searchResult.getAggregations(); + if (aggregations != null) { + final Aggregation streamsAggregation = aggregations.get(Message.FIELD_STREAMS); + + if (streamsAggregation instanceof MultiBucketsAggregation) { + final List buckets = ((MultiBucketsAggregation) 
streamsAggregation).getBuckets(); + if (buckets != null) { + return buckets.stream() + .map(MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toSet()); + } + } + } + return Set.of(); + } + + private SearchRequest createSearchRequest(final String fieldName, final String indexName) { + final SearchSourceBuilder searchSourceBuilder = createSearchSourceBuilder(fieldName); + return new SearchRequest(indexName) + .source(searchSourceBuilder); + } + + private SearchSourceBuilder createSearchSourceBuilder(final String fieldName) { + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(new ExistsQueryBuilder(fieldName)) + .trackTotalHits(false) + .size(0); + + searchSourceBuilder.aggregation(AggregationBuilders + .terms(Message.FIELD_STREAMS) + .field(Message.FIELD_STREAMS) + .size(SEARCH_MAX_BUCKETS_ES)); + return searchSourceBuilder; + } + +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/mapping/FieldMappingApi.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/mapping/FieldMappingApi.java new file mode 100644 index 000000000000..9dc7ba2cb924 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/mapping/FieldMappingApi.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.mapping; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.auto.value.AutoValue; +import com.google.common.collect.Streams; +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; + +import javax.annotation.Nullable; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +public class FieldMappingApi { + + private final ElasticsearchClient client; + + @Inject + public FieldMappingApi(ElasticsearchClient client) { + this.client = client; + } + + @AutoValue + public static abstract class FieldMapping { + public abstract String type(); + + public abstract Optional fielddata(); + + static FieldMapping create(String type, @Nullable Boolean fielddata) { + return new AutoValue_FieldMappingApi_FieldMapping(type, Optional.ofNullable(fielddata)); + } + } + + public Map fieldTypes(final String index) { + final JsonNode result = client.executeRequest(request(index), "Unable to retrieve field types of index " + index); + final JsonNode fields = result.path(index).path("mappings").path("properties"); + //noinspection UnstableApiUsage + return Streams.stream(fields.fields()) + .collect(Collectors.toMap(Map.Entry::getKey, entry -> { + final JsonNode entryValue = entry.getValue(); + String type = entryValue.path("type").asText(); + if ("alias".equals(type)) { + String aliasPath = entryValue.path("path").asText(); + type = fields.path(aliasPath).path("type").asText(); + } + return FieldMapping.create( + type, + entryValue.path("fielddata").asBoolean() + ); + })); + } + + private Request request(String index) { + return new Request("GET", "/" + index + "/_mapping"); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/migrations/V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/migrations/V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7.java new file mode 100644 index 000000000000..791bd4ec917c --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/migrations/V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.migrations; + +import com.fasterxml.jackson.databind.JsonNode; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.storage.elasticsearch7.PlainJsonApi; +import org.graylog2.migrations.V20170607164210_MigrateReopenedIndicesToAliases; + +import jakarta.inject.Inject; + +import java.util.Collection; + +public class V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7 implements V20170607164210_MigrateReopenedIndicesToAliases.ClusterState { + private final PlainJsonApi plainJsonApi; + + @Inject + public V20170607164210_MigrateReopenedIndicesToAliasesClusterStateES7(PlainJsonApi plainJsonApi) { + this.plainJsonApi = plainJsonApi; + } + + @Override + public JsonNode getForIndices(Collection indices) { + return plainJsonApi.perform(request(indices), + "Couldn't read cluster state for reopened indices " + indices); + } + + private Request request(Collection indices) { + final StringBuilder apiEndpoint = new StringBuilder("/_cluster/state/metadata"); + if (!indices.isEmpty()) { + final String joinedIndices = String.join(",", indices); + apiEndpoint.append("/"); + apiEndpoint.append(joinedIndices); + } + + return new Request("GET", apiEndpoint.toString()); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/stats/ClusterStatsApi.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/stats/ClusterStatsApi.java new file mode 100644 index 000000000000..1bc0ed25aafa --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/stats/ClusterStatsApi.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.stats; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import jakarta.inject.Inject; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.storage.elasticsearch7.PlainJsonApi; +import org.graylog2.rest.resources.system.indexer.responses.IndexSetStats; + +public class ClusterStatsApi { + private final ObjectMapper objectMapper; + private final PlainJsonApi jsonApi; + + @Inject + public ClusterStatsApi(ObjectMapper objectMapper, + PlainJsonApi jsonApi) { + this.objectMapper = objectMapper; + this.jsonApi = jsonApi; + } + + public IndexSetStats clusterStats() { + final Request request = new Request("GET", "/_cluster/stats"); + request.addParameter("filter_path", "indices.count,indices.docs.count,indices.store.size_in_bytes"); + final JsonNode stats = jsonApi.perform(request, "Couldn't read Elasticsearch cluster stats"); + + final long indicesCount = stats.path("indices").path("count").asLong(); + final long docsCount = stats.path("indices").path("docs").path("count").asLong(); + final long sizeBytes = stats.path("indices").path("store").path("size_in_bytes").asLong(); + + return IndexSetStats.create(indicesCount, docsCount, sizeBytes); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/stats/StatsApi.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/stats/StatsApi.java new file mode 100644 index 000000000000..20a33dbed82e --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/stats/StatsApi.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.stats; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableSet; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Response; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; + +import jakarta.inject.Inject; + +import java.util.Collection; +import java.util.Collections; +import java.util.Optional; +import java.util.function.Consumer; + +public class StatsApi { + private final ObjectMapper objectMapper; + private final ElasticsearchClient client; + + @Inject + public StatsApi(ObjectMapper objectMapper, + ElasticsearchClient client) { + this.objectMapper = objectMapper; + this.client = client; + } + + public JsonNode indexStats(String index) { + return stats(index); + } + + public JsonNode indexStatsWithShardLevel(String index) { + return indexStatsWithShardLevel(Collections.singleton(index)).path(index); + } + + public JsonNode indexStatsWithShardLevel(Collection indices) { + final JsonNode stats = stats(indices, request -> { + request.addParameter("level", "shards"); + request.addParameter("ignore_unavailable", "true"); + }); + + return stats.path("indices"); + } + + public JsonNode indexStatsWithDocsAndStore(Collection indices) { + final JsonNode stats = stats(indices, ImmutableSet.of("store", "docs")); + + return stats.path("indices"); + } + + public Optional storeSizes(String index) { + final JsonNode stats = stats(Collections.singleton(index), Collections.singleton("store")); + final JsonNode sizeInBytes = stats.path("indices") + .path(index) + .path("primaries") + .path("store") + .path("size_in_bytes"); + return Optional.of(sizeInBytes).filter(JsonNode::isNumber).map(JsonNode::asLong); + } + + private JsonNode stats(String index) { + return stats(Collections.singleton(index), Collections.emptySet(), (request) -> {}); + } + + private JsonNode stats(Collection indices, + Collection metrics) { + return stats(indices, metrics, (request) -> {}); + } + + private JsonNode stats(Collection indices, + Consumer fn) { + return stats(indices, Collections.emptySet(), fn); + } + + private JsonNode stats(Collection indices, + Collection metrics, + Consumer prepareRequest) { + final StringBuilder endpoint = new StringBuilder(); + if (!indices.isEmpty()) { + final String joinedIndices = String.join(",", indices); + endpoint.append("/"); + endpoint.append(joinedIndices); + } + endpoint.append("/_stats"); + if (!metrics.isEmpty()) { + final String joinedMetrics = String.join(",", metrics); + endpoint.append("/"); + endpoint.append(joinedMetrics); + } + + final Request request = new Request("GET", endpoint.toString()); + prepareRequest.accept(request); + return client.execute((c, requestOptions) -> { + request.setOptions(requestOptions); + final Response response = c.getLowLevelClient().performRequest(request); + return objectMapper.readTree(response.getEntity().getContent()); + }, "Unable to retrieve index stats for " + String.join(",", indices)); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/ESGeneratedQueryContext.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/ESGeneratedQueryContext.java new file mode 100644 index 000000000000..68855fa3b287 --- /dev/null +++ 
b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/ESGeneratedQueryContext.java @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views; + +import com.google.common.base.MoreObjects; +import com.google.inject.assistedinject.Assisted; +import com.google.inject.assistedinject.AssistedInject; +import org.graylog.plugins.views.search.Filter; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.elasticsearch.FieldTypesLookup; +import org.graylog.plugins.views.search.engine.GeneratedQueryContext; +import org.graylog.plugins.views.search.errors.SearchError; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSpec; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.joda.time.DateTimeZone; + +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +public class ESGeneratedQueryContext implements GeneratedQueryContext { + + private final ElasticsearchBackend elasticsearchBackend; + private final Map searchTypeQueries; + private final Map contextMap; + private final Set errors; + private final SearchSourceBuilder ssb; + + private final FieldTypesLookup fieldTypes; + private final MultiBucketsAggregation.Bucket rowBucket; + private final DateTimeZone timezone; + + @AssistedInject + public ESGeneratedQueryContext( + @Assisted ElasticsearchBackend elasticsearchBackend, + @Assisted SearchSourceBuilder ssb, + @Assisted Collection validationErrors, + @Assisted DateTimeZone timezone, + FieldTypesLookup fieldTypes) { + this.elasticsearchBackend = elasticsearchBackend; + this.ssb = ssb; + this.fieldTypes = fieldTypes; + this.errors = new HashSet<>(validationErrors); + this.rowBucket = null; + this.contextMap = new HashMap<>(); + this.searchTypeQueries = new HashMap<>(); + this.timezone = timezone; + } + + private ESGeneratedQueryContext( + ElasticsearchBackend elasticsearchBackend, + SearchSourceBuilder ssb, + Collection validationErrors, + FieldTypesLookup fieldTypes, + MultiBucketsAggregation.Bucket rowBucket, + Map searchTypeQueries, + Map contextMap, + DateTimeZone timezone) { + this.elasticsearchBackend = elasticsearchBackend; + this.ssb = ssb; + this.fieldTypes = fieldTypes; + this.errors = new HashSet<>(validationErrors); + this.rowBucket = rowBucket; + this.searchTypeQueries = searchTypeQueries; + this.contextMap = contextMap; + this.timezone = timezone; + } + + public interface Factory { + 
ESGeneratedQueryContext create( + ElasticsearchBackend elasticsearchBackend, + SearchSourceBuilder ssb, + Collection validationErrors, + DateTimeZone timezone + ); + } + + public SearchSourceBuilder searchSourceBuilder(SearchType searchType) { + return this.searchTypeQueries.computeIfAbsent(searchType.id(), (ignored) -> { + final QueryBuilder queryBuilder = generateFilterClause(searchType.filter()) + .map(filterClause -> (QueryBuilder)new BoolQueryBuilder().must(ssb.query()).must(filterClause)) + .orElse(ssb.query()); + return ssb.shallowCopy() + .slice(ssb.slice()) + .query(queryBuilder); + }); + } + + Map searchTypeQueries() { + return this.searchTypeQueries; + } + + @Override + public Optional getSearchTypeQueryString(String id) { + return Optional.ofNullable(searchTypeQueries.get(id)).map(SearchSourceBuilder::toString); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("elasticsearch query", ssb) + .toString(); + } + + public Map contextMap() { + return contextMap; + } + + private Optional generateFilterClause(Filter filter) { + return elasticsearchBackend.generateFilterClause(filter); + } + + public String seriesName(SeriesSpec seriesSpec, Pivot pivot) { + return pivot.id() + "-series-" + seriesSpec.id(); + } + + public Optional fieldType(Set streamIds, String field) { + return fieldTypes.getType(streamIds, field); + } + + @Override + public void addError(SearchError error) { + errors.add(error); + } + + @Override + public Collection errors() { + return errors; + } + + public ESGeneratedQueryContext withRowBucket(MultiBucketsAggregation.Bucket rowBucket) { + return new ESGeneratedQueryContext(elasticsearchBackend, ssb, errors, fieldTypes, rowBucket, searchTypeQueries, contextMap, timezone); + } + + public Optional rowBucket() { + return Optional.ofNullable(this.rowBucket); + } + + @Override + public DateTimeZone timezone() { + return timezone; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/ElasticsearchBackend.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/ElasticsearchBackend.java new file mode 100644 index 000000000000..a7a7a83a20a3 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/ElasticsearchBackend.java @@ -0,0 +1,375 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.views; + +import com.google.common.collect.Maps; +import io.opentelemetry.instrumentation.annotations.WithSpan; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.inject.Provider; +import jakarta.validation.constraints.NotNull; +import org.graylog.plugins.views.search.Filter; +import org.graylog.plugins.views.search.GlobalOverride; +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.QueryResult; +import org.graylog.plugins.views.search.SearchJob; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.elasticsearch.IndexLookup; +import org.graylog.plugins.views.search.engine.BackendQuery; +import org.graylog.plugins.views.search.engine.QueryBackend; +import org.graylog.plugins.views.search.engine.QueryExecutionStats; +import org.graylog.plugins.views.search.engine.monitoring.collection.StatsCollector; +import org.graylog.plugins.views.search.errors.SearchError; +import org.graylog.plugins.views.search.errors.SearchTypeError; +import org.graylog.plugins.views.search.errors.SearchTypeErrorParser; +import org.graylog.plugins.views.search.filter.AndFilter; +import org.graylog.plugins.views.search.filter.OrFilter; +import org.graylog.plugins.views.search.filter.QueryStringFilter; +import org.graylog.plugins.views.search.filter.StreamFilter; +import org.graylog.plugins.views.search.searchfilters.db.UsedSearchFiltersToQueryStringsMapper; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.ShardOperationFailedException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.MultiSearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.PlainActionFuture; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; +import org.graylog.storage.elasticsearch7.TimeRangeQueryFactory; +import org.graylog.storage.elasticsearch7.views.searchtypes.ESSearchTypeHandler; +import org.graylog2.indexer.ElasticsearchException; +import org.graylog2.indexer.FieldTypeException; +import org.graylog2.indexer.ranges.IndexRange; +import org.graylog2.plugin.Message; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; +import org.graylog2.streams.StreamService; +import org.joda.time.DateTimeZone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +public class ElasticsearchBackend implements QueryBackend { + private static final Logger LOG = 
LoggerFactory.getLogger(ElasticsearchBackend.class); + + private final Map>> elasticsearchSearchTypeHandlers; + private final ElasticsearchClient client; + private final IndexLookup indexLookup; + private final ESGeneratedQueryContext.Factory queryContextFactory; + private final UsedSearchFiltersToQueryStringsMapper usedSearchFiltersToQueryStringsMapper; + private final boolean allowLeadingWildcard; + private final StatsCollector executionStatsCollector; + private final StreamService streamService; + + @Inject + public ElasticsearchBackend(Map>> elasticsearchSearchTypeHandlers, + ElasticsearchClient client, + IndexLookup indexLookup, + ESGeneratedQueryContext.Factory queryContextFactory, + UsedSearchFiltersToQueryStringsMapper usedSearchFiltersToQueryStringsMapper, + StatsCollector executionStatsCollector, + StreamService streamService, + @Named("allow_leading_wildcard_searches") boolean allowLeadingWildcard) { + this.elasticsearchSearchTypeHandlers = elasticsearchSearchTypeHandlers; + this.client = client; + this.indexLookup = indexLookup; + + this.queryContextFactory = queryContextFactory; + this.usedSearchFiltersToQueryStringsMapper = usedSearchFiltersToQueryStringsMapper; + this.executionStatsCollector = executionStatsCollector; + this.streamService = streamService; + this.allowLeadingWildcard = allowLeadingWildcard; + } + + private QueryBuilder translateQueryString(String queryString) { + return (queryString.isEmpty() || queryString.trim().equals("*")) + ? QueryBuilders.matchAllQuery() + : QueryBuilders.queryStringQuery(queryString).allowLeadingWildcard(allowLeadingWildcard); + } + + @Override + public StatsCollector getExecutionStatsCollector() { + return this.executionStatsCollector; + } + + @WithSpan + @Override + public ESGeneratedQueryContext generate(Query query, Set validationErrors, DateTimeZone timezone) { + final BackendQuery backendQuery = query.query(); + + final Set searchTypes = query.searchTypes(); + + final QueryBuilder normalizedRootQuery = translateQueryString(backendQuery.queryString()); + + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + .filter(normalizedRootQuery); + + usedSearchFiltersToQueryStringsMapper.map(query.filters()) + .stream() + .map(this::translateQueryString) + .forEach(boolQuery::filter); + + // add the optional root query filters + generateFilterClause(query.filter()).ifPresent(boolQuery::filter); + + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(boolQuery) + .from(0) + .size(0) + .trackTotalHits(true); + + + final ESGeneratedQueryContext queryContext = queryContextFactory.create(this, searchSourceBuilder, validationErrors, timezone); + searchTypes.stream() + .filter(searchType -> !isSearchTypeWithError(queryContext, searchType.id())) + .forEach(searchType -> { + final String type = searchType.type(); + final Provider> searchTypeHandler = elasticsearchSearchTypeHandlers.get(type); + if (searchTypeHandler == null) { + LOG.error("Unknown search type {} for elasticsearch backend, cannot generate query part. 
Skipping this search type.", type); + queryContext.addError(new SearchTypeError(query, searchType.id(), "Unknown search type '" + type + "' for elasticsearch backend, cannot generate query")); + return; + } + + final SearchSourceBuilder searchTypeSourceBuilder = queryContext.searchSourceBuilder(searchType); + + final Set effectiveStreamIds = query.effectiveStreams(searchType); + + final BoolQueryBuilder searchTypeOverrides = QueryBuilders.boolQuery() + .must(searchTypeSourceBuilder.query()) + .must( + Objects.requireNonNull( + TimeRangeQueryFactory.create( + query.effectiveTimeRange(searchType) + ), + "Timerange for search type " + searchType.id() + " cannot be found in query or search type." + ) + ) + .must(QueryBuilders.termsQuery(Message.FIELD_STREAMS, effectiveStreamIds)); + + searchType.query().ifPresent(searchTypeQuery -> { + final QueryBuilder normalizedSearchTypeQuery = translateQueryString(searchTypeQuery.queryString()); + searchTypeOverrides.must(normalizedSearchTypeQuery); + }); + + usedSearchFiltersToQueryStringsMapper.map(searchType.filters()) + .stream() + .map(this::translateQueryString) + .forEach(searchTypeOverrides::must); + + searchTypeSourceBuilder.query(searchTypeOverrides); + + searchTypeHandler.get().generateQueryPart(query, searchType, queryContext); + }); + + return queryContext; + } + + // TODO make pluggable + public Optional generateFilterClause(Filter filter) { + if (filter == null) { + return Optional.empty(); + } + + switch (filter.type()) { + case AndFilter.NAME: + final BoolQueryBuilder andBuilder = QueryBuilders.boolQuery(); + filter.filters().stream() + .map(this::generateFilterClause) + .forEach(optQueryBuilder -> optQueryBuilder.ifPresent(andBuilder::must)); + return Optional.of(andBuilder); + case OrFilter.NAME: + final BoolQueryBuilder orBuilder = QueryBuilders.boolQuery(); + // TODO for the common case "any of these streams" we can optimize the filter into + // a single "termsQuery" instead of "termQuery OR termQuery" if all direct children are "StreamFilter" + filter.filters().stream() + .map(this::generateFilterClause) + .forEach(optQueryBuilder -> optQueryBuilder.ifPresent(orBuilder::should)); + return Optional.of(orBuilder); + case StreamFilter.NAME: + // Skipping stream filter, will be extracted elsewhere + return Optional.empty(); + case QueryStringFilter.NAME: + return Optional.of(QueryBuilders.queryStringQuery(((QueryStringFilter) filter).query())); + } + return Optional.empty(); + } + + @Override + public Set indexRangesForStreamsInTimeRange(Set streamIds, TimeRange timeRange) { + return indexLookup.indexRangesForStreamsInTimeRange(streamIds, timeRange); + } + + @Override + public Optional streamTitle(String streamId) { + return Optional.ofNullable(streamService.streamTitleFromCache(streamId)); + } + + @WithSpan + @Override + public QueryResult doRun(SearchJob job, Query query, ESGeneratedQueryContext queryContext) { + if (query.searchTypes().isEmpty()) { + return QueryResult.builder() + .query(query) + .searchTypes(Collections.emptyMap()) + .errors(new HashSet<>(queryContext.errors())) + .build(); + } + LOG.debug("Running query {} for job {}", query.id(), job.getId()); + final HashMap resultsMap = Maps.newHashMap(); + + final Set affectedIndices = indexLookup.indexNamesForStreamsInTimeRange(query.usedStreamIds(), query.timerange()); + + final Map searchTypeQueries = queryContext.searchTypeQueries(); + final List searchTypeIds = new ArrayList<>(searchTypeQueries.keySet()); + + final List searches = searchTypeIds + .stream() + 
.map(searchTypeId -> { + final Set affectedIndicesForSearchType = query.searchTypes().stream() + .filter(s -> s.id().equalsIgnoreCase(searchTypeId)).findFirst() + .flatMap(searchType -> { + if (searchType.effectiveStreams().isEmpty() + && query.globalOverride().flatMap(GlobalOverride::timerange).isEmpty() + && searchType.timerange().isEmpty()) { + return Optional.empty(); + } + return Optional.of(indexLookup.indexNamesForStreamsInTimeRange(query.effectiveStreams(searchType), query.effectiveTimeRange(searchType))); + }) + .orElse(affectedIndices); + + Set indices = affectedIndicesForSearchType.isEmpty() ? Collections.singleton("") : affectedIndicesForSearchType; + return new SearchRequest() + .source(searchTypeQueries.get(searchTypeId)) + .indices(indices.toArray(new String[0])) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + }) + .toList(); + + //ES does not support per-request cancel_after_time_interval. We have to use simplified solution - the whole multi-search will be cancelled if it takes more than configured max. exec. time. + final PlainActionFuture mSearchFuture = client.cancellableMsearch(searches); + job.setQueryExecutionFuture(query.id(), mSearchFuture); + final List results = getResults(mSearchFuture, job.getCancelAfterSeconds(), searches.size()); + + for (SearchType searchType : query.searchTypes()) { + final String searchTypeId = searchType.id(); + final Provider> handlerProvider = elasticsearchSearchTypeHandlers.get(searchType.type()); + if (handlerProvider == null) { + LOG.error("Unknown search type '{}', cannot convert query result.", searchType.type()); + // no need to add another error here, as the query generation code will have added the error about the missing handler already + continue; + } + + if (isSearchTypeWithError(queryContext, searchTypeId)) { + LOG.error("Failed search type '{}', cannot convert query result, skipping.", searchType.type()); + // no need to add another error here, as the query generation code will have added the error about the missing handler already + continue; + } + + // we create a new instance because some search type handlers might need to track information between generating the query and + // processing its result, such as aggregations, which depend on the name and type + final ESSearchTypeHandler handler = handlerProvider.get(); + final int searchTypeIndex = searchTypeIds.indexOf(searchTypeId); + final MultiSearchResponse.Item multiSearchResponse = results.get(searchTypeIndex); + if (multiSearchResponse.isFailure()) { + ElasticsearchException e = new ElasticsearchException("Search type returned error: ", multiSearchResponse.getFailure()); + queryContext.addError(SearchTypeErrorParser.parse(query, searchTypeId, e)); + } else if (checkForFailedShards(multiSearchResponse).isPresent()) { + ElasticsearchException e = checkForFailedShards(multiSearchResponse).get(); + queryContext.addError(SearchTypeErrorParser.parse(query, searchTypeId, e)); + } else { + try { + final SearchType.Result searchTypeResult = handler.extractResult(job, query, searchType, multiSearchResponse.getResponse(), queryContext); + if (searchTypeResult != null) { + resultsMap.put(searchTypeId, searchTypeResult); + } + } catch (Exception e) { + LOG.warn("Unable to extract results: ", e); + queryContext.addError(new SearchTypeError(query, searchTypeId, e)); + } + } + } + + LOG.debug("Query {} ran for job {}", query.id(), job.getId()); + return QueryResult.builder() + .query(query) + .searchTypes(resultsMap) + .errors(new HashSet<>(queryContext.errors())) + 
.build(); + } + + @NotNull + private static List getResults(PlainActionFuture mSearchFuture, + final Integer cancelAfterSeconds, + final int numSearchTypes) { + try { + if (!SearchJob.NO_CANCELLATION.equals(cancelAfterSeconds)) { + return Arrays.asList(mSearchFuture.get(cancelAfterSeconds, TimeUnit.SECONDS).getResponses()); + } else { + return Arrays.asList(mSearchFuture.get().getResponses()); + } + } catch (TimeoutException | InterruptedException | ExecutionException e) { + return Collections.nCopies(numSearchTypes, new MultiSearchResponse.Item(null, e)); + } + } + + private Optional checkForFailedShards(MultiSearchResponse.Item multiSearchResponse) { + if (multiSearchResponse.isFailure()) { + return Optional.of(new ElasticsearchException(multiSearchResponse.getFailureMessage(), multiSearchResponse.getFailure())); + } + + final SearchResponse searchResponse = multiSearchResponse.getResponse(); + if (searchResponse != null && searchResponse.getFailedShards() > 0) { + final List shardFailures = Arrays.stream(searchResponse.getShardFailures()) + .map(ShardOperationFailedException::getCause) + .toList(); + final List nonNumericFieldErrors = shardFailures + .stream() + .map(Throwable::getMessage) + .filter(message -> message.contains("Expected numeric type on field")) + .distinct() + .toList(); + if (!nonNumericFieldErrors.isEmpty()) { + return Optional.of(new FieldTypeException("Unable to perform search query: ", nonNumericFieldErrors)); + } + + final List errors = shardFailures + .stream() + .map(Throwable::getMessage) + .distinct() + .toList(); + return Optional.of(new ElasticsearchException("Unable to perform search query: ", errors)); + } + + return Optional.empty(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/ElasticsearchExportBackend.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/ElasticsearchExportBackend.java new file mode 100644 index 000000000000..262c70553a4e --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/ElasticsearchExportBackend.java @@ -0,0 +1,210 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
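For orientation, a minimal sketch (not part of this patch) of the bool query that the generate() method above assembles per search type: the root query is combined with the search type's effective time range and a terms filter on the streams the user may read. The unshaded Elasticsearch imports and the literal "streams" field name are simplifications; the real code uses the shaded client packages and Message.FIELD_STREAMS.

    import java.util.Set;

    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    class SearchTypeQuerySketch {
        // rootQuery = query string + search filters + optional filter clause of the query
        // timeRangeQuery = TimeRangeQueryFactory.create(query.effectiveTimeRange(searchType))
        static QueryBuilder composeSearchTypeQuery(QueryBuilder rootQuery,
                                                   QueryBuilder timeRangeQuery,
                                                   Set<String> effectiveStreamIds) {
            return QueryBuilders.boolQuery()
                    .must(rootQuery)
                    .must(timeRangeQuery)
                    .must(QueryBuilders.termsQuery("streams", effectiveStreamIds));
        }
    }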
+ */ +package org.graylog.storage.elasticsearch7.views.export; + +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.graylog.plugins.views.search.elasticsearch.ElasticsearchQueryString; +import org.graylog.plugins.views.search.elasticsearch.IndexLookup; +import org.graylog.plugins.views.search.export.ExportBackend; +import org.graylog.plugins.views.search.export.ExportMessagesCommand; +import org.graylog.plugins.views.search.export.SimpleMessage; +import org.graylog.plugins.views.search.export.SimpleMessageChunk; +import org.graylog.plugins.views.search.searchfilters.db.UsedSearchFiltersToQueryStringsMapper; +import org.graylog.plugins.views.search.searchfilters.model.UsedSearchFilter; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.TermsQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.storage.elasticsearch7.TimeRangeQueryFactory; +import org.graylog2.database.filtering.AttributeFilter; +import org.graylog2.plugin.Message; +import org.joda.time.DateTimeZone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; + +import static java.util.Objects.requireNonNull; +import static java.util.stream.Collectors.toCollection; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders.termsQuery; + +@SuppressWarnings("rawtypes") +public class ElasticsearchExportBackend implements ExportBackend { + private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchExportBackend.class); + + private final IndexLookup indexLookup; + private final RequestStrategy requestStrategy; + private final boolean allowLeadingWildcard; + + private final UsedSearchFiltersToQueryStringsMapper usedSearchFiltersToQueryStringsMapper; + + @Inject + public ElasticsearchExportBackend(IndexLookup indexLookup, + RequestStrategy requestStrategy, + @Named("allow_leading_wildcard_searches") boolean allowLeadingWildcard, + final UsedSearchFiltersToQueryStringsMapper usedSearchFiltersToQueryStringsMapper) { + this.indexLookup = indexLookup; + this.requestStrategy = requestStrategy; + this.allowLeadingWildcard = allowLeadingWildcard; + this.usedSearchFiltersToQueryStringsMapper = usedSearchFiltersToQueryStringsMapper; + } + + @Override + public void run(ExportMessagesCommand command, Consumer chunkCollector) { + boolean isFirstChunk = true; + int totalCount = 0; + + while (true) { + List hits = search(command); + + if (hits.isEmpty()) { + 
publishChunk(chunkCollector, hits, command.fieldsInOrder(), command.timeZone(), SimpleMessageChunk.ChunkOrder.LAST); + return; + } + + boolean success = publishChunk(chunkCollector, hits, command.fieldsInOrder(), command.timeZone(), isFirstChunk ? SimpleMessageChunk.ChunkOrder.FIRST : SimpleMessageChunk.ChunkOrder.INTERMEDIATE); + if (!success) { + return; + } + + totalCount += hits.size(); + if (command.limit().isPresent() && totalCount >= command.limit().getAsInt()) { + LOG.info("Limit of {} reached. Stopping message retrieval.", command.limit().getAsInt()); + publishChunk(chunkCollector, Collections.emptyList(), command.fieldsInOrder(), command.timeZone(), SimpleMessageChunk.ChunkOrder.LAST); + return; + } + + isFirstChunk = false; + } + } + + private List search(ExportMessagesCommand command) { + SearchRequest search = prepareSearchRequest(command); + + return requestStrategy.nextChunk(search, command); + } + + private SearchRequest prepareSearchRequest(ExportMessagesCommand command) { + SearchSourceBuilder ssb = searchSourceBuilderFrom(command); + + Set indices = indicesFor(command); + return new SearchRequest() + .source(ssb) + .indices(indices.toArray(new String[0])) + .indicesOptions(IndicesOptions.fromOptions(false, false, true, false)); + } + + private SearchSourceBuilder searchSourceBuilderFrom(ExportMessagesCommand command) { + QueryBuilder query = queryFrom(command); + + SearchSourceBuilder ssb = new SearchSourceBuilder() + .query(query) + .size(command.chunkSize()); + if (!command.exportAllFields()) { + ssb = ssb.fetchSource(command.fieldsInOrder().toArray(new String[]{}), null); + } + + return requestStrategy.configure(ssb); + } + + private QueryBuilder queryFrom(ExportMessagesCommand command) { + final BoolQueryBuilder boolQueryBuilder = boolQuery() + .filter(queryStringFilter(command.queryString())) + .filter(timestampFilter(command)) + .filter(streamsFilter(command)); + + final List attributeFilters = command.attributeFilters(); + if (attributeFilters != null && !attributeFilters.isEmpty()) { + attributeFilters.stream() + .flatMap(attribute -> attribute.toQueryStrings().stream()) + .forEach(filterQuery -> boolQueryBuilder.filter(QueryBuilders.queryStringQuery(filterQuery))); + } + + final Collection usedSearchFilters = command.usedSearchFilters(); + if (usedSearchFilters != null) { + usedSearchFiltersToQueryStringsMapper.map(usedSearchFilters) + .forEach(filterQueryString -> boolQueryBuilder.filter(queryStringFilter(filterQueryString))); + } + return boolQueryBuilder; + } + + private QueryBuilder queryStringFilter(final ElasticsearchQueryString backendQuery) { + return backendQuery.isEmpty() ? 
+ matchAllQuery() : + queryStringQuery(backendQuery.queryString()).allowLeadingWildcard(allowLeadingWildcard); + } + + private QueryBuilder queryStringFilter(final String queryString) { + ElasticsearchQueryString backendQuery = ElasticsearchQueryString.of(queryString); + return queryStringFilter(backendQuery); + } + + private QueryBuilder timestampFilter(ExportMessagesCommand command) { + return requireNonNull(TimeRangeQueryFactory.create(command.timeRange())); + } + + private TermsQueryBuilder streamsFilter(ExportMessagesCommand command) { + return termsQuery(Message.FIELD_STREAMS, command.streams()); + } + + private Set indicesFor(ExportMessagesCommand command) { + return indexLookup.indexNamesForStreamsInTimeRange(command.streams(), command.timeRange()); + } + + private boolean publishChunk(Consumer chunkCollector, List hits, LinkedHashSet desiredFieldsInOrder, DateTimeZone timeZone, SimpleMessageChunk.ChunkOrder chunkOrder) { + SimpleMessageChunk chunk = chunkFrom(hits, desiredFieldsInOrder, timeZone, chunkOrder); + + try { + chunkCollector.accept(chunk); + return true; + } catch (Exception e) { + LOG.warn("Chunk publishing threw exception. Stopping search after queries", e); + return false; + } + } + + private SimpleMessageChunk chunkFrom(List hits, LinkedHashSet desiredFieldsInOrder, DateTimeZone timeZone, SimpleMessageChunk.ChunkOrder chunkOrder) { + LinkedHashSet messages = messagesFrom(hits, timeZone); + + return SimpleMessageChunk.builder() + .fieldsInOrder(desiredFieldsInOrder) + .messages(messages) + .chunkOrder(chunkOrder) + .build(); + } + + private LinkedHashSet messagesFrom(List hits, DateTimeZone timeZone) { + return hits.stream() + .map(h -> buildHitWithAllFields(h.getSourceAsMap(), h.getIndex(), timeZone)) + .collect(toCollection(LinkedHashSet::new)); + } + + +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/ExportClient.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/ExportClient.java new file mode 100644 index 000000000000..f93383564d08 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/ExportClient.java @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
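A hypothetical caller of the export backend above, shown only to illustrate the Consumer-based chunk contract; collectAll() is not part of this patch, and building the ExportMessagesCommand is out of scope here.

    import java.util.ArrayList;
    import java.util.List;

    import org.graylog.plugins.views.search.export.ExportMessagesCommand;
    import org.graylog.plugins.views.search.export.SimpleMessageChunk;

    class ExportUsageSketch {
        // Gathers every chunk the backend publishes. The backend stops on its own after
        // publishing a chunk with ChunkOrder.LAST, or earlier if the consumer throws.
        static List<SimpleMessageChunk> collectAll(ElasticsearchExportBackend backend,
                                                   ExportMessagesCommand command) {
            final List<SimpleMessageChunk> chunks = new ArrayList<>();
            backend.run(command, chunks::add);
            return chunks;
        }
    }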
+ */ +package org.graylog.storage.elasticsearch7.views.export; + +import jakarta.inject.Inject; +import org.graylog.plugins.views.search.export.ExportException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.ShardOperationFailedException; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RequestOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.RestHighLevelClient; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; +import org.graylog.storage.elasticsearch7.ThrowingBiFunction; +import org.graylog2.indexer.ElasticsearchException; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +public class ExportClient { + private final ElasticsearchClient client; + + @Inject + public ExportClient(ElasticsearchClient client) { + this.client = client; + } + + public SearchResponse search(SearchRequest request, String errorMessage) { + try { + final SearchResponse response = this.client.search(request, errorMessage); + if (response.getFailedShards() > 0) { + final List errors = Arrays.stream(response.getShardFailures()) + .map(ShardOperationFailedException::getCause) + .map(Throwable::getMessage) + .distinct() + .toList(); + throw new ElasticsearchException("Unable to perform export query: ", errors); + } + return response; + } catch (Exception e) { + throw wrapException(e); + } + } + + private ExportException wrapException(Exception e) { + return new ExportException("Unable to complete export: ", new ElasticsearchException(e)); + } + + public SearchResponse singleSearch(SearchRequest request, String errorMessage) { + try { + return this.client.singleSearch(request, errorMessage); + } catch (Exception e) { + throw wrapException(e); + } + } + + public R execute(ThrowingBiFunction fn, String errorMessage) { + try { + return this.client.execute(fn, errorMessage); + } catch (Exception e) { + throw wrapException(e); + } + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/RequestStrategy.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/RequestStrategy.java new file mode 100644 index 000000000000..ae0a98a6fe74 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/RequestStrategy.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.views.export; + +import org.graylog.plugins.views.search.export.ExportMessagesCommand; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; + +import java.util.List; + +public interface RequestStrategy { + List nextChunk(SearchRequest search, ExportMessagesCommand command); + + /** + * Allows implementers to specify options on SearchSourceBuilder that cannot be specified on Search.Builder. + * + * @see #nextChunk(SearchRequest, ExportMessagesCommand) + * @see org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder#searchAfter(Object[]) + */ + default SearchSourceBuilder configure(SearchSourceBuilder ssb) { + return ssb; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/SearchAfter.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/SearchAfter.java new file mode 100644 index 000000000000..a71cc7b656b4 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/export/SearchAfter.java @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
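To make the extension point above concrete, here is a deliberately naive from/size-based implementation of RequestStrategy. It is illustrative only and not part of this patch; the shipped implementation is the SearchAfter class that follows, and a from-based variant like this one would be capped by Elasticsearch's max_result_window. Unshaded client imports are used for brevity, and the class is assumed to live in the same export package as RequestStrategy and ExportClient.

    import java.util.List;
    import java.util.stream.Collectors;

    import com.google.common.collect.Streams;
    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.builder.SearchSourceBuilder;
    import org.graylog.plugins.views.search.export.ExportMessagesCommand;

    class FromSizePaging implements RequestStrategy {
        private final ExportClient client;
        private int offset = 0;

        FromSizePaging(ExportClient client) {
            this.client = client;
        }

        @Override
        public List<SearchHit> nextChunk(SearchRequest search, ExportMessagesCommand command) {
            final List<SearchHit> hits = Streams.stream(
                            client.search(search, "Failed to execute paged export request").getHits())
                    .collect(Collectors.toList());
            offset += command.chunkSize();   // advance the window for the next call
            return hits;
        }

        @Override
        public SearchSourceBuilder configure(SearchSourceBuilder ssb) {
            return ssb.from(offset);         // an option that must be set on the source builder
        }
    }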
+ */ +package org.graylog.storage.elasticsearch7.views.export; + +import com.google.common.collect.Streams; +import jakarta.inject.Inject; +import org.graylog.plugins.views.search.export.ExportMessagesCommand; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchRequest; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortOrder; +import org.graylog2.plugin.Message; + +import java.util.List; +import java.util.stream.Collectors; + +public class SearchAfter implements RequestStrategy { + + static final String DEFAULT_TIEBREAKER_FIELD = Message.GL2_SECOND_SORT_FIELD; + static final String EVENTS_TIEBREAKER_FIELD = Message.FIELD_ID; + + private final ExportClient client; + + private Object[] searchAfterValues = null; + + @Inject + public SearchAfter(ExportClient client) { + this.client = client; + } + + @Override + public List nextChunk(SearchRequest search, ExportMessagesCommand command) { + + SearchResponse result = search(search); + List hits = Streams.stream(result.getHits()).collect(Collectors.toList()); + searchAfterValues = lastHitSortFrom(hits); + return hits; + } + + private SearchResponse search(SearchRequest search) { + configureSort(search.source()); + + return client.search(search, "Failed to execute Search After request"); + } + + private void configureSort(SearchSourceBuilder source) { + source.sort(SortBuilders.fieldSort("timestamp").order(SortOrder.ASC)); + source.sort(SortBuilders.fieldSort(DEFAULT_TIEBREAKER_FIELD).order(SortOrder.ASC).unmappedType("keyword")); + } + + private Object[] lastHitSortFrom(List hits) { + if (hits.isEmpty()) + return null; + + SearchHit lastHit = hits.get(hits.size() - 1); + + return lastHit.getSortValues(); + } + + @Override + public SearchSourceBuilder configure(SearchSourceBuilder ssb) { + return searchAfterValues == null ? ssb : ssb.searchAfter(searchAfterValues); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/migrations/V20200730000000_AddGl2MessageIdFieldAliasForEventsES7.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/migrations/V20200730000000_AddGl2MessageIdFieldAliasForEventsES7.java new file mode 100644 index 000000000000..59e823b5068f --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/migrations/V20200730000000_AddGl2MessageIdFieldAliasForEventsES7.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
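The round trip that the SearchAfter strategy performs, reduced to the two source builders involved. This is a sketch with unshaded imports and literal field names; "gl2_second_sort_field" stands in for Message.GL2_SECOND_SORT_FIELD.

    import org.elasticsearch.search.builder.SearchSourceBuilder;
    import org.elasticsearch.search.sort.SortBuilders;
    import org.elasticsearch.search.sort.SortOrder;

    class SearchAfterSketch {
        // First page: deterministic sort on timestamp plus the tie-breaker field.
        static SearchSourceBuilder firstPage(int chunkSize) {
            return new SearchSourceBuilder()
                    .size(chunkSize)
                    .sort(SortBuilders.fieldSort("timestamp").order(SortOrder.ASC))
                    .sort(SortBuilders.fieldSort("gl2_second_sort_field")
                            .order(SortOrder.ASC)
                            .unmappedType("keyword"));
        }

        // Every further page: same sort, plus the sort values of the previous page's
        // last hit (SearchHit#getSortValues()) as the search_after cursor.
        static SearchSourceBuilder nextPage(int chunkSize, Object[] lastHitSortValues) {
            return firstPage(chunkSize).searchAfter(lastHitSortValues);
        }
    }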
+ */ +package org.graylog.storage.elasticsearch7.views.migrations; + +import com.google.common.collect.ImmutableMap; +import org.graylog.plugins.views.migrations.V20200730000000_AddGl2MessageIdFieldAliasForEvents; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.IndicesOptions; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.indices.PutMappingRequest; +import org.graylog.storage.elasticsearch7.ElasticsearchClient; +import org.graylog2.indexer.ElasticsearchException; + +import jakarta.inject.Inject; + +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Set; + +import static org.graylog2.plugin.Message.FIELD_GL2_MESSAGE_ID; + +public class V20200730000000_AddGl2MessageIdFieldAliasForEventsES7 implements V20200730000000_AddGl2MessageIdFieldAliasForEvents.ElasticsearchAdapter { + + private final ElasticsearchClient client; + + @Inject + public V20200730000000_AddGl2MessageIdFieldAliasForEventsES7(ElasticsearchClient client) { + this.client = client; + } + + @Override + public void addGl2MessageIdFieldAlias(Set indexPrefixes) { + + final String[] prefixesWithWildcard = indexPrefixes.stream().map(p -> p + "*").toArray(String[]::new); + + final PutMappingRequest putMappingRequest = new PutMappingRequest(prefixesWithWildcard) + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED) + .source(ImmutableMap.of("properties", ImmutableMap.of(FIELD_GL2_MESSAGE_ID, aliasMapping()))); + + try { + final AcknowledgedResponse acknowledgedResponse = client.execute((c, requestOptions) -> c.indices().putMapping(putMappingRequest, requestOptions)); + if (!acknowledgedResponse.isAcknowledged()) { + throw new ElasticsearchException(errorMsgFor(prefixesWithWildcard) + " Elasticsearch failed to acknowledge."); + } + } catch (ElasticsearchException e) { + throw new ElasticsearchException(errorMsgFor(prefixesWithWildcard), e); + } + } + + private String errorMsgFor(String[] prefixesWithWildcard) { + return "Failed to add field alias " + FIELD_GL2_MESSAGE_ID + " for indices " + Arrays.toString(prefixesWithWildcard) + "."; + } + + static LinkedHashMap aliasMapping() { + LinkedHashMap aliasMapping = new LinkedHashMap<>(); + aliasMapping.put("type", "alias"); + aliasMapping.put("path", "id"); + return aliasMapping; + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESEventList.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESEventList.java new file mode 100644 index 000000000000..6daf563c5a65 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESEventList.java @@ -0,0 +1,120 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.views.searchtypes; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.opentelemetry.instrumentation.annotations.WithSpan; +import jakarta.inject.Inject; +import org.graylog.events.event.EventDto; +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.SearchJob; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.searchtypes.events.CommonEventSummary; +import org.graylog.plugins.views.search.searchtypes.events.EventList; +import org.graylog.plugins.views.search.searchtypes.events.EventSummary; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.BoolQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregations; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.FieldSortBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortOrder; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +public class ESEventList implements ESSearchTypeHandler { + private final ObjectMapper objectMapper; + + @Inject + public ESEventList(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + public void doGenerateQueryPart(Query query, EventList eventList, + ESGeneratedQueryContext queryContext) { + final Set effectiveStreams = eventList.streams().isEmpty() + ? 
query.usedStreamIds() + : eventList.streams(); + + final var searchSourceBuilder = queryContext.searchSourceBuilder(eventList); + final FieldSortBuilder sortConfig = sortConfig(eventList); + searchSourceBuilder.sort(sortConfig); + final var queryBuilder = searchSourceBuilder.query(); + if (!effectiveStreams.isEmpty() && queryBuilder instanceof BoolQueryBuilder boolQueryBuilder) { + boolQueryBuilder.must(QueryBuilders.termsQuery(EventDto.FIELD_SOURCE_STREAMS, effectiveStreams)); + } + if (!eventList.attributes().isEmpty() && queryBuilder instanceof BoolQueryBuilder boolQueryBuilder) { + final var filterQueries = eventList.attributes().stream() + .filter(attribute -> EventList.KNOWN_ATTRIBUTES.contains(attribute.field())) + .flatMap(attribute -> attribute.toQueryStrings().stream()) + .toList(); + + filterQueries.forEach(filterQuery -> boolQueryBuilder.filter(QueryBuilders.queryStringQuery(filterQuery))); + } + + eventList.page().ifPresentOrElse(page -> { + final int pageSize = eventList.perPage().orElse(EventList.DEFAULT_PAGE_SIZE); + searchSourceBuilder.size(pageSize); + searchSourceBuilder.from((page - 1) * pageSize); + }, () -> searchSourceBuilder.size(10000)); + } + + private SortOrder toSortOrder(EventList.Direction direction) { + return switch (direction) { + case ASC -> SortOrder.ASC; + case DESC -> SortOrder.DESC; + }; + } + + protected FieldSortBuilder sortConfig(EventList eventList) { + final var sortConfig = eventList.sort() + .filter(sort -> EventList.KNOWN_ATTRIBUTES.contains(sort.field())) + .orElse(EventList.DEFAULT_SORT); + return new FieldSortBuilder(sortConfig.field()).order(toSortOrder(sortConfig.direction())); + } + + protected List> extractResult(SearchResponse result) { + return StreamSupport.stream(result.getHits().spliterator(), false) + .map(SearchHit::getSourceAsMap) + .collect(Collectors.toList()); + } + + @WithSpan + @Override + public SearchType.Result doExtractResult(SearchJob job, Query query, EventList searchType, SearchResponse result, + Aggregations aggregations, ESGeneratedQueryContext queryContext) { + final Set effectiveStreams = searchType.streams().isEmpty() + ? query.usedStreamIds() + : searchType.streams(); + final List eventSummaries = extractResult(result).stream() + .map(rawEvent -> objectMapper.convertValue(rawEvent, EventDto.class)) + .map(EventSummary::parse) + .collect(Collectors.toList()); + final EventList.Result.Builder resultBuilder = EventList.Result.builder() + .events(eventSummaries) + .id(searchType.id()) + .totalResults(result.getHits().getTotalHits().value); + searchType.name().ifPresent(resultBuilder::name); + return resultBuilder.build(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESMessageList.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESMessageList.java new file mode 100644 index 000000000000..c12de5dc6781 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESMessageList.java @@ -0,0 +1,196 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes; + +import io.opentelemetry.instrumentation.annotations.WithSpan; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.graylog.plugins.views.search.LegacyDecoratorProcessor; +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.SearchJob; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.searchtypes.MessageList; +import org.graylog.plugins.views.search.searchtypes.Sort; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.text.Text; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.index.query.QueryStringQueryBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHit; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregations; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.FieldSortBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.sort.SortOrder; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; +import org.graylog2.indexer.results.ResultMessage; +import org.graylog2.indexer.results.ResultMessageFactory; +import org.graylog2.plugin.Message; +import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange; +import org.graylog2.rest.models.messages.responses.ResultMessageSummary; +import org.graylog2.rest.resources.search.responses.SearchResponse; +import org.joda.time.DateTime; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +import static com.google.common.base.MoreObjects.firstNonNull; + +public class ESMessageList implements ESSearchTypeHandler { + private final LegacyDecoratorProcessor decoratorProcessor; + private final ResultMessageFactory resultMessageFactory; + private final boolean allowHighlighting; + + @Inject + public ESMessageList(LegacyDecoratorProcessor decoratorProcessor, + ResultMessageFactory resultMessageFactory, + @Named("allow_highlighting") boolean allowHighlighting) { + this.decoratorProcessor = decoratorProcessor; + this.resultMessageFactory = resultMessageFactory; + this.allowHighlighting = allowHighlighting; + } + + private ResultMessage resultMessageFromSearchHit(SearchHit hit) { + final Map> highlights = hit.getHighlightFields().entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, ESMessageList::highlightsFromFragments)); + return resultMessageFactory.parseFromSource(hit.getId(), hit.getIndex(), hit.getSourceAsMap(), highlights); + } + + private static List highlightsFromFragments(Map.Entry entry) { + return Arrays.stream(entry.getValue().fragments()) + 
.map(Text::toString) + .collect(Collectors.toList()); + } + + @Override + public void doGenerateQueryPart(Query query, MessageList messageList, ESGeneratedQueryContext queryContext) { + + final SearchSourceBuilder searchSourceBuilder = queryContext.searchSourceBuilder(messageList) + .size(messageList.limit()) + .from(messageList.offset()); + + applyHighlightingIfActivated(searchSourceBuilder, query); + + final Set effectiveStreamIds = query.effectiveStreams(messageList); + + if (!messageList.fields().isEmpty()) { + searchSourceBuilder.fetchSource(messageList.fields().toArray(new String[0]), new String[0]); + } + + List sorts = firstNonNull(messageList.sort(), Collections.singletonList(Sort.create(Message.FIELD_TIMESTAMP, Sort.Order.DESC))); + + // Always add the gl2_second_sort_field alias, if sorting by timestamp is requested. + // The alias points to gl2_message_id which contains a sequence nr that represents the order in which messages were received. + // If messages have identical timestamps, we can still sort them correctly. + final Optional timeStampSort = findSort(sorts, Message.FIELD_TIMESTAMP); + final Optional msgIdSort = findSort(sorts, Message.FIELD_GL2_MESSAGE_ID); + final Optional secondSortField = findSort(sorts, Message.GL2_SECOND_SORT_FIELD); + if (timeStampSort.isPresent() && msgIdSort.isEmpty() && secondSortField.isEmpty()) { + sorts = new ArrayList<>(sorts); + final Sort newMsgIdSort = Sort.create(Message.GL2_SECOND_SORT_FIELD, timeStampSort.get().order()); + sorts.add(sorts.indexOf(timeStampSort.get()) + 1, newMsgIdSort); + } + sorts.forEach(sort -> { + final FieldSortBuilder fieldSort = SortBuilders.fieldSort(sort.field()) + .order(toSortOrder(sort.order())); + if (sort.field().equals(Message.GL2_SECOND_SORT_FIELD)) { + fieldSort.unmappedType("keyword"); // old indices might not have a mapping for gl2_second_sort_field + searchSourceBuilder.sort(fieldSort); + } else { + final Optional fieldType = queryContext.fieldType(effectiveStreamIds, sort.field()); + searchSourceBuilder.sort(fieldType.map(fieldSort::unmappedType).orElse(fieldSort)); + } + }); + } + + private static Optional findSort(List sorts, String search) { + return sorts.stream().filter(s -> s.field().equals(search)).findFirst(); + } + + private SortOrder toSortOrder(Sort.Order sortOrder) { + switch (sortOrder) { + case ASC: + return SortOrder.ASC; + case DESC: + return SortOrder.DESC; + default: + throw new IllegalStateException("Invalid sort order: " + sortOrder); + } + } + + private void applyHighlightingIfActivated(SearchSourceBuilder searchSourceBuilder, Query query) { + if (!allowHighlighting) { + return; + } + + final QueryStringQueryBuilder highlightQuery = decoratedHighlightQuery(query); + + searchSourceBuilder.highlighter(new HighlightBuilder().requireFieldMatch(false) + .highlightQuery(highlightQuery) + .field("*") + .fragmentSize(0) + .numOfFragments(0)); + } + + private QueryStringQueryBuilder decoratedHighlightQuery(Query query) { + final String queryString = query.query().queryString(); + + return QueryBuilders.queryStringQuery(queryString); + } + + @WithSpan + @Override + public SearchType.Result doExtractResult(SearchJob job, Query query, MessageList searchType, org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse result, Aggregations aggregations, ESGeneratedQueryContext queryContext) { + final List messages = StreamSupport.stream(result.getHits().spliterator(), false) + .map(this::resultMessageFromSearchHit) + .map((resultMessage) -> 
ResultMessageSummary.create(resultMessage.highlightRanges, resultMessage.getMessage().getFields(), resultMessage.getIndex())) + .collect(Collectors.toList()); + + final String queryString = query.query().queryString(); + + final DateTime from = query.effectiveTimeRange(searchType).getFrom(); + final DateTime to = query.effectiveTimeRange(searchType).getTo(); + + final SearchResponse searchResponse = SearchResponse.create( + queryString, + queryString, + Collections.emptySet(), + messages, + Collections.emptySet(), + 0, + result.getHits().getTotalHits().value, + from, + to + ); + + final SearchResponse decoratedSearchResponse = decoratorProcessor.decorateSearchResponse(searchResponse, searchType.decorators()); + + final MessageList.Result.Builder resultBuilder = MessageList.Result.result(searchType.id()) + .messages(decoratedSearchResponse.messages()) + .effectiveTimerange(AbsoluteRange.create(from, to)) + .totalResults(decoratedSearchResponse.totalResults()); + return searchType.name().map(resultBuilder::name).orElse(resultBuilder).build(); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESSearchTypeHandler.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESSearchTypeHandler.java new file mode 100644 index 000000000000..78643365f40d --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/ESSearchTypeHandler.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes; + +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.SearchJob; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.engine.SearchTypeHandler; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregations; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; + +/** + * Signature of search type handlers the elasticsearch backend takes. + * All of these take a {@link ESGeneratedQueryContext} as input. 
+ * + * @param the {@link SearchType SearchType} this handler deals with + */ +public interface ESSearchTypeHandler extends SearchTypeHandler { + @Override + default SearchType.Result doExtractResultImpl(SearchJob job, Query query, S searchType, SearchResponse queryResult, ESGeneratedQueryContext queryContext) { + // if the search type was filtered, extract the sub-aggregation before passing it to the handler + // this way we don't have to duplicate this step everywhere + final Aggregations aggregations = queryResult.getAggregations(); + return doExtractResult(job, query, searchType, queryResult, aggregations, queryContext); + } + + SearchType.Result doExtractResult(SearchJob job, Query query, S searchType, SearchResponse queryResult, Aggregations aggregations, ESGeneratedQueryContext queryContext); +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/AggTypes.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/AggTypes.java new file mode 100644 index 000000000000..e356200ff2a2 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/AggTypes.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot; + +import org.graylog.plugins.views.search.searchtypes.pivot.PivotSpec; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.HasAggregations; +import org.jooq.lambda.tuple.Tuple; +import org.jooq.lambda.tuple.Tuple2; + +import java.util.HashMap; +import java.util.Map; + +/** + * This solely exists to hide the nasty type signature of the aggregation type map from the rest of the code. + * It's just ugly and in the way. + */ +public class AggTypes { + final Map>> aggTypeMap = new HashMap<>(); + + public void record(PivotSpec pivotSpec, String name, Class aggClass) { + aggTypeMap.put(pivotSpec, Tuple.tuple(name, aggClass)); + } + + public Aggregation getSubAggregation(PivotSpec pivotSpec, HasAggregations currentAggregationOrBucket) { + final Tuple2> tuple2 = getTypes(pivotSpec); + return currentAggregationOrBucket.getAggregations().get(tuple2.v1); + } + + public Tuple2> getTypes(PivotSpec pivotSpec) { + return aggTypeMap.get(pivotSpec); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivot.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivot.java new file mode 100644 index 000000000000..bef8ba41803d --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivot.java @@ -0,0 +1,267 @@ +/* + * Copyright (C) 2020 Graylog, Inc. 
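A hypothetical record/lookup round trip with the AggTypes helper above, as a series handler would perform it: remember the aggregation name while generating the query, then resolve the matching sub-aggregation from a bucket while extracting results. The names are made up, the imports are unshaded for brevity, and Max is used only as an example metric aggregation result type.

    import org.elasticsearch.search.aggregations.HasAggregations;
    import org.elasticsearch.search.aggregations.metrics.Max;
    import org.graylog.plugins.views.search.searchtypes.pivot.PivotSpec;

    class AggTypesSketch {
        // At query-generation time: associate the named aggregation with this spec.
        static void recordSeries(AggTypes aggTypes, PivotSpec series, String name) {
            aggTypes.record(series, name, Max.class);
        }

        // At result-extraction time: fetch the matching sub-aggregation back from the bucket.
        static Max resolveSeries(AggTypes aggTypes, PivotSpec series, HasAggregations bucket) {
            return (Max) aggTypes.getSubAggregation(series, bucket);
        }
    }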
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot; + +import com.google.common.collect.ImmutableList; +import io.opentelemetry.instrumentation.annotations.WithSpan; +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.SearchJob; +import org.graylog.plugins.views.search.SearchType; +import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpecHandler; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.PivotResult; +import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSpec; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregations; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.HasAggregations; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.builder.SearchSourceBuilder; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; +import org.graylog.storage.elasticsearch7.views.searchtypes.ESSearchTypeHandler; +import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.inject.Inject; + +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +public class ESPivot implements ESSearchTypeHandler { + private static final Logger LOG = LoggerFactory.getLogger(ESPivot.class); + private static final String AGG_NAME = "agg"; + + private final Map> bucketHandlers; + private final Map> seriesHandlers; + private final EffectiveTimeRangeExtractor effectiveTimeRangeExtractor; + + @Inject + public ESPivot(Map> bucketHandlers, + Map> seriesHandlers, + EffectiveTimeRangeExtractor effectiveTimeRangeExtractor) { + this.bucketHandlers = bucketHandlers; + this.seriesHandlers = seriesHandlers; + this.effectiveTimeRangeExtractor = effectiveTimeRangeExtractor; + } + + @Override + public void doGenerateQueryPart(Query query, Pivot pivot, ESGeneratedQueryContext queryContext) { + LOG.debug("Generating aggregation for {}", pivot); + final SearchSourceBuilder searchSourceBuilder = 
queryContext.searchSourceBuilder(pivot); + + final Map contextMap = queryContext.contextMap(); + final AggTypes aggTypes = new AggTypes(); + contextMap.put(pivot.id(), aggTypes); + + var generateRollups = pivot.rollup() || (pivot.rowGroups().isEmpty() && pivot.columnGroups().isEmpty()); + + // add global rollup series if those were requested + if (generateRollups) { + seriesStream(pivot, queryContext, "global rollup") + .filter(result -> Placement.METRIC.equals(result.placement())) + .map(SeriesAggregationBuilder::aggregationBuilder) + .forEach(searchSourceBuilder::aggregation); + } + + final BucketSpecHandler.CreatedAggregations createdAggregations = createPivots(BucketSpecHandler.Direction.Row, query, pivot, pivot.rowGroups(), queryContext); + final AggregationBuilder rootAggregation = createdAggregations.root(); + final AggregationBuilder leafAggregation = createdAggregations.leaf(); + final List metrics = createdAggregations.metrics(); + seriesStream(pivot, queryContext, "metrics") + .forEach(result -> { + switch (result.placement()) { + case METRIC -> metrics.forEach(metric -> metric.subAggregation(result.aggregationBuilder())); + case ROW -> rootAggregation.subAggregation(result.aggregationBuilder()); + case ROOT -> { + if (!generateRollups) { + searchSourceBuilder.aggregation(result.aggregationBuilder()); + } + } + } + }); + + if (!pivot.columnGroups().isEmpty()) { + final BucketSpecHandler.CreatedAggregations columnsAggregation = createPivots(BucketSpecHandler.Direction.Column, query, pivot, pivot.columnGroups(), queryContext); + final AggregationBuilder columnsRootAggregation = columnsAggregation.root(); + final AggregationBuilder columnsLeafAggregation = columnsAggregation.leaf(); + final List columnMetrics = columnsAggregation.metrics(); + seriesStream(pivot, queryContext, "metrics") + .forEach(result -> { + var aggregationBuilder = result.aggregationBuilder(); + switch (result.placement()) { + case COLUMN -> columnsLeafAggregation.subAggregation(aggregationBuilder); + case METRIC -> columnMetrics.forEach(metric -> metric.subAggregation(aggregationBuilder)); + } + }); + if (leafAggregation != null) { + leafAggregation.subAggregation(columnsRootAggregation); + } else { + searchSourceBuilder.aggregation(columnsRootAggregation); + } + } + + if (rootAggregation != null) { + searchSourceBuilder.aggregation(rootAggregation); + } + + addTimeStampAggregations(searchSourceBuilder); + } + + private void addTimeStampAggregations(SearchSourceBuilder searchSourceBuilder) { + final MinAggregationBuilder startTimestamp = AggregationBuilders.min("timestamp-min").field("timestamp"); + final MaxAggregationBuilder endTimestamp = AggregationBuilders.max("timestamp-max").field("timestamp"); + searchSourceBuilder.aggregation(startTimestamp); + searchSourceBuilder.aggregation(endTimestamp); + } + + private BucketSpecHandler.CreatedAggregations createPivots(BucketSpecHandler.Direction direction, Query query, Pivot pivot, List pivots, ESGeneratedQueryContext queryContext) { + AggregationBuilder leaf = null; + AggregationBuilder root = null; + final List metrics = new ArrayList<>(); + for (BucketSpec bucketSpec : pivots) { + final ESPivotBucketSpecHandler bucketHandler = bucketHandlers.get(bucketSpec.type()); + final BucketSpecHandler.CreatedAggregations bucketAggregations = bucketHandler.createAggregation(direction, AGG_NAME, pivot, bucketSpec, queryContext, query); + final AggregationBuilder aggregationRoot = bucketAggregations.root(); + final AggregationBuilder aggregationLeaf = 
+    private BucketSpecHandler.CreatedAggregations<AggregationBuilder> createPivots(BucketSpecHandler.Direction direction, Query query, Pivot pivot, List<BucketSpec> pivots, ESGeneratedQueryContext queryContext) {
+        AggregationBuilder leaf = null;
+        AggregationBuilder root = null;
+        final List<AggregationBuilder> metrics = new ArrayList<>();
+        for (BucketSpec bucketSpec : pivots) {
+            final ESPivotBucketSpecHandler<? extends BucketSpec> bucketHandler = bucketHandlers.get(bucketSpec.type());
+            final BucketSpecHandler.CreatedAggregations<AggregationBuilder> bucketAggregations = bucketHandler.createAggregation(direction, AGG_NAME, pivot, bucketSpec, queryContext, query);
+            final AggregationBuilder aggregationRoot = bucketAggregations.root();
+            final AggregationBuilder aggregationLeaf = bucketAggregations.leaf();
+            final List<AggregationBuilder> aggregationMetrics = bucketAggregations.metrics();
+
+            metrics.addAll(aggregationMetrics);
+            if (root == null && leaf == null) {
+                root = aggregationRoot;
+                leaf = aggregationLeaf;
+            } else {
+                leaf.subAggregation(aggregationRoot);
+                leaf = aggregationLeaf;
+            }
+        }
+
+        return BucketSpecHandler.CreatedAggregations.create(root, leaf, metrics);
+    }
+
+    private Stream<SeriesAggregationBuilder> seriesStream(Pivot pivot, ESGeneratedQueryContext queryContext, String reason) {
+        return pivot.series()
+                .stream()
+                .distinct()
+                .flatMap((seriesSpec) -> {
+                    final String seriesName = queryContext.seriesName(seriesSpec, pivot);
+                    LOG.debug("Adding {} series '{}' with name '{}'", reason, seriesSpec.type(), seriesName);
+                    final ESPivotSeriesSpecHandler<? extends SeriesSpec, ? extends Aggregation> esPivotSeriesSpecHandler = seriesHandlers.get(seriesSpec.type());
+                    if (esPivotSeriesSpecHandler == null) {
+                        throw new IllegalArgumentException("No series handler registered for: " + seriesSpec.type());
+                    }
+                    return esPivotSeriesSpecHandler.createAggregation(seriesName, pivot, seriesSpec, this, queryContext).stream();
+                });
+    }
+
+    @WithSpan
+    @Override
+    public SearchType.Result doExtractResult(SearchJob job, Query query, Pivot pivot, SearchResponse queryResult, Aggregations aggregations, ESGeneratedQueryContext queryContext) {
+        final AbsoluteRange effectiveTimerange = this.effectiveTimeRangeExtractor.extract(queryResult, query, pivot);
+
+        final PivotResult.Builder resultBuilder = PivotResult.builder()
+                .id(pivot.id())
+                .effectiveTimerange(effectiveTimerange)
+                .total(extractDocumentCount(queryResult));
+
+        pivot.name().ifPresent(resultBuilder::name);
+
+        final MultiBucketsAggregation.Bucket initialBucket = createInitialBucket(queryResult);
+
+        retrieveBuckets(pivot, pivot.rowGroups(), initialBucket)
+                .forEach(tuple -> {
+                    final ImmutableList<String> rowKeys = tuple.keys();
+                    final MultiBucketsAggregation.Bucket rowBucket = tuple.bucket();
+                    final PivotResult.Row.Builder rowBuilder = PivotResult.Row.builder()
+                            .key(rowKeys)
+                            .source("leaf");
+                    if (pivot.columnGroups().isEmpty() || pivot.rollup()) {
+                        processSeries(rowBuilder, queryResult, queryContext, pivot, new ArrayDeque<>(), rowBucket, true, "row-leaf");
+                    }
+                    if (!pivot.columnGroups().isEmpty()) {
+                        var contextWithRowBucket = queryContext.withRowBucket(rowBucket);
+                        retrieveBuckets(pivot, pivot.columnGroups(), rowBucket)
+                                .forEach(columnBucketTuple -> {
+                                    final ImmutableList<String> columnKeys = columnBucketTuple.keys();
+                                    final MultiBucketsAggregation.Bucket columnBucket = columnBucketTuple.bucket();
+
+                                    processSeries(rowBuilder, queryResult, contextWithRowBucket, pivot, new ArrayDeque<>(columnKeys), columnBucket, false, "col-leaf");
+                                });
+                    }
+                    resultBuilder.addRow(rowBuilder.build());
+                });
+
+        if (!pivot.rowGroups().isEmpty() && pivot.rollup()) {
+            final PivotResult.Row.Builder rowBuilder = PivotResult.Row.builder().key(ImmutableList.of());
+            processSeries(rowBuilder, queryResult, queryContext, pivot, new ArrayDeque<>(), initialBucket, true, "row-inner");
+            resultBuilder.addRow(rowBuilder.source("non-leaf").build());
+        }
+
+        return resultBuilder.build();
+    }
+
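+    // Walks the nested aggregation result level by level: starting from the top-level (initial) bucket,
+    // each bucket handler flat-maps the buckets of the previous level into (keys, bucket) tuples, so the
+    // resulting stream contains one tuple per leaf bucket together with the full key path leading to it.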
+    private Stream<PivotBucket> retrieveBuckets(Pivot pivot, List<BucketSpec> pivots, MultiBucketsAggregation.Bucket initialBucket) {
+        Stream<PivotBucket> result = Stream.of(PivotBucket.create(ImmutableList.of(), initialBucket, false));
+
+        for (BucketSpec bucketSpec : pivots) {
+            result = result.flatMap((tuple) -> {
+                final ESPivotBucketSpecHandler<? extends BucketSpec> bucketHandler = bucketHandlers.get(bucketSpec.type());
+                return bucketHandler.extractBuckets(pivot, bucketSpec, tuple);
+            });
+        }
+
+        return result;
+    }
+
+    private MultiBucketsAggregation.Bucket createInitialBucket(SearchResponse queryResult) {
+        return InitialBucket.create(queryResult);
+    }
+
+    private void processSeries(PivotResult.Row.Builder rowBuilder,
+                               SearchResponse searchResult,
+                               ESGeneratedQueryContext queryContext,
+                               Pivot pivot,
+                               ArrayDeque<String> columnKeys,
+                               HasAggregations aggregation,
+                               boolean rollup,
+                               String source) {
+        pivot.series().forEach(seriesSpec -> {
+            final ESPivotSeriesSpecHandler<? extends SeriesSpec, ? extends Aggregation> seriesHandler = this.seriesHandlers.get(seriesSpec.type());
+            final Aggregation series = seriesHandler.extractAggregationFromResult(pivot, seriesSpec, aggregation, queryContext);
+            seriesHandler.handleResult(pivot, seriesSpec, searchResult, series, this, queryContext)
+                    .map(value -> {
+                        columnKeys.addLast(value.id());
+                        final PivotResult.Value v = PivotResult.Value.create(columnKeys, value.value(), rollup, source);
+                        columnKeys.removeLast();
+                        return v;
+                    })
+                    .forEach(rowBuilder::addValue);
+        });
+    }
+
+    private long extractDocumentCount(SearchResponse queryResult) {
+        return queryResult.getHits().getTotalHits().value;
+    }
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivotBucketSpecHandler.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivotBucketSpecHandler.java
new file mode 100644
index 000000000000..8d713c46da7f
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivotBucketSpecHandler.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */
+package org.graylog.storage.elasticsearch7.views.searchtypes.pivot;
+
+import org.graylog.plugins.views.search.Query;
+import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpec;
+import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpecHandler;
+import org.graylog.plugins.views.search.searchtypes.pivot.Pivot;
+import org.graylog.plugins.views.search.searchtypes.pivot.PivotSort;
+import org.graylog.plugins.views.search.searchtypes.pivot.PivotSpec;
+import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSort;
+import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSpec;
+import org.graylog.plugins.views.search.searchtypes.pivot.SortSpec;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregation;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.BucketOrder;
+import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public abstract class ESPivotBucketSpecHandler<SPEC extends BucketSpec>
+        implements BucketSpecHandler<SPEC, AggregationBuilder, ESGeneratedQueryContext> {
+
+    protected AggTypes aggTypes(ESGeneratedQueryContext queryContext, Pivot pivot) {
+        return (AggTypes) queryContext.contextMap().get(pivot.id());
+    }
+
+    protected void record(ESGeneratedQueryContext queryContext, Pivot pivot, PivotSpec spec, String name, Class<? extends Aggregation> aggregationClass) {
+        aggTypes(queryContext, pivot).record(spec, name, aggregationClass);
+    }
+
+    public record SortOrders(List<BucketOrder> orders, List<AggregationBuilder> sortingAggregations) {}
+
+    protected SortOrders orderListForPivot(Pivot pivot, ESGeneratedQueryContext esGeneratedQueryContext, BucketOrder defaultOrder, Query query) {
+        final List<AggregationBuilder> sortingAggregations = new ArrayList<>();
+        final List<BucketOrder> ordering = pivot.sort()
+                .stream()
+                .map(sortSpec -> {
+                    final var isAscending = sortSpec.direction().equals(SortSpec.Direction.Ascending);
+                    if (sortSpec instanceof PivotSort pivotSort) {
+                        if (isSortOnNumericPivotField(pivot, pivotSort, esGeneratedQueryContext, query)) {
+                            /* When we sort on a numeric pivot field, we create a metric sub-aggregation for that field, which returns
+                               the numeric value of it, so that we can sort on it numerically. Any metric aggregation (min/max/avg) will work. */
+                            final var aggregationName = "sort_helper" + pivotSort.field();
+                            sortingAggregations.add(AggregationBuilders.max(aggregationName).field(pivotSort.field()));
+                            return BucketOrder.aggregation(aggregationName, isAscending);
+                        } else {
+                            return BucketOrder.key(isAscending);
+                        }
+                    }
+                    if (sortSpec instanceof SeriesSort) {
+                        final Optional<SeriesSpec> matchingSeriesSpec = pivot.series()
+                                .stream()
+                                .filter(series -> series.literal().equals(sortSpec.field()))
+                                .findFirst();
+                        return matchingSeriesSpec
+                                .map(seriesSpec -> {
+                                    if (seriesSpec.literal().equals("count()")) {
+                                        return BucketOrder.count(isAscending);
+                                    }
+
+                                    String orderPath = seriesSpec.statsSubfieldName()
+                                            .map(subField -> esGeneratedQueryContext.seriesName(seriesSpec, pivot) + "." + subField)
+                                            .orElse(esGeneratedQueryContext.seriesName(seriesSpec, pivot));
+
+                                    return BucketOrder.aggregation(orderPath, isAscending);
+                                })
+                                .orElse(null);
+                    }
+
+                    return null;
+                })
+                .filter(Objects::nonNull)
+                .collect(Collectors.toList());
+        return ordering.isEmpty()
+                ? new SortOrders(List.of(defaultOrder), List.of())
+                : new SortOrders(ordering, List.copyOf(sortingAggregations));
+    }
+
+    private boolean isSortOnNumericPivotField(Pivot pivot, PivotSort pivotSort, ESGeneratedQueryContext queryContext, Query query) {
+        return queryContext.fieldType(query.effectiveStreams(pivot), pivotSort.field())
+                .filter(this::isNumericFieldType)
+                .isPresent();
+    }
+
+    private boolean isNumericFieldType(String fieldType) {
+        return fieldType.equals("long") || fieldType.equals("double") || fieldType.equals("float");
+    }
+
+    public abstract Stream<PivotBucket> extractBuckets(Pivot pivot, BucketSpec bucketSpec, PivotBucket initialBucket);
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivotSeriesSpecHandler.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivotSeriesSpecHandler.java
new file mode 100644
index 000000000000..b7c796a95ac5
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/ESPivotSeriesSpecHandler.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the Server Side Public License, version 1,
+ * as published by MongoDB, Inc.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * Server Side Public License for more details.
+ *
+ * You should have received a copy of the Server Side Public License
+ * along with this program. If not, see
+ * <http://www.mongodb.com/licensing/server-side-public-license>.
+ */
+package org.graylog.storage.elasticsearch7.views.searchtypes.pivot;
+
+import org.graylog.plugins.views.search.engine.GeneratedQueryContext;
+import org.graylog.plugins.views.search.engine.SearchTypeHandler;
+import org.graylog.plugins.views.search.searchtypes.pivot.Pivot;
+import org.graylog.plugins.views.search.searchtypes.pivot.PivotSpec;
+import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSpec;
+import org.graylog.plugins.views.search.searchtypes.pivot.SeriesSpecHandler;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregation;
+import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.HasAggregations;
+import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext;
+import org.graylog.storage.elasticsearch7.views.searchtypes.ESSearchTypeHandler;
+
+import java.util.stream.Stream;
+
+public abstract class ESPivotSeriesSpecHandler<SPEC_TYPE extends SeriesSpec, AGGREGATION_RESULT extends Aggregation>
+        implements SeriesSpecHandler<SPEC_TYPE, SeriesAggregationBuilder, SearchResponse, AGGREGATION_RESULT, ESSearchTypeHandler<Pivot>, ESGeneratedQueryContext> {
+
+    protected AggTypes aggTypes(ESGeneratedQueryContext queryContext, Pivot pivot) {
+        return (AggTypes) queryContext.contextMap().get(pivot.id());
+    }
+
+    protected void record(ESGeneratedQueryContext queryContext, Pivot pivot, PivotSpec spec, String name, Class<? extends Aggregation> aggregationClass) {
+        aggTypes(queryContext, pivot).record(spec, name, aggregationClass);
+    }
+
+    public Aggregation extractAggregationFromResult(Pivot pivot, PivotSpec spec, HasAggregations aggregations, ESGeneratedQueryContext queryContext) {
+        return aggTypes(queryContext, pivot).getSubAggregation(spec, aggregations);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Stream<Value> handleResult(Pivot pivot, SeriesSpec seriesSpec, Object queryResult, Object aggregationResult, SearchTypeHandler searchTypeHandler, GeneratedQueryContext queryContext) {
+        return doHandleResult(pivot, (SPEC_TYPE) seriesSpec, (SearchResponse) queryResult, (AGGREGATION_RESULT) aggregationResult, (ESSearchTypeHandler<Pivot>) searchTypeHandler, (ESGeneratedQueryContext) queryContext);
+    }
+
+    @Override
+    public abstract Stream<Value> doHandleResult(Pivot pivot, SPEC_TYPE seriesSpec, SearchResponse searchResult, AGGREGATION_RESULT aggregationResult, ESSearchTypeHandler<Pivot> searchTypeHandler, ESGeneratedQueryContext queryContext);
+
+    public static class Value {
+
+        private final String id;
+        private final String key;
+        private final Object value;
+
+        public Value(String id, String key, Object value) {
+            this.id = id;
+            this.key = key;
+            this.value = value;
+        }
+
+        public static Value create(String id, String key, Object value) {
+            return new Value(id, key, value);
+        }
+
+        public String id() {
+            return id;
+        }
+
+        public String key() {
+            return key;
+        }
+
+        public Object value() {
+            return value;
+        }
+    }
+}
diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/EffectiveTimeRangeExtractor.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/EffectiveTimeRangeExtractor.java
new file mode 100644
index 000000000000..6ee2740b8ba6
--- /dev/null
+++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/EffectiveTimeRangeExtractor.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2020 Graylog, Inc.
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot; + +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.Max; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.metrics.Min; +import org.graylog2.plugin.indexer.searches.timeranges.AbsoluteRange; +import org.graylog2.plugin.indexer.searches.timeranges.InvalidRangeParametersException; +import org.graylog2.plugin.indexer.searches.timeranges.RelativeRange; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class EffectiveTimeRangeExtractor { + private static final Logger LOG = LoggerFactory.getLogger(EffectiveTimeRangeExtractor.class); + private static final TimeRange ALL_MESSAGES_TIMERANGE = allMessagesTimeRange(); + private static TimeRange allMessagesTimeRange() { + try { + return RelativeRange.create(0); + } catch (InvalidRangeParametersException e) { + LOG.error("Unable to instantiate all messages timerange: ", e); + } + return null; + } + + AbsoluteRange extract(SearchResponse queryResult, Query query, Pivot pivot) { + if (queryResult.getHits().getTotalHits().value != 0) { + return getAbsoluteRangeFromAggregations(queryResult, query, pivot); + } else { + return getAbsoluteRangeFromPivot(query, pivot); + } + } + private AbsoluteRange getAbsoluteRangeFromPivot(final Query query, final Pivot pivot) { + final TimeRange pivotRange = query.effectiveTimeRange(pivot); + return AbsoluteRange.create(pivotRange.getFrom(), pivotRange.getTo()); + } + + private AbsoluteRange getAbsoluteRangeFromAggregations(final SearchResponse queryResult, final Query query, final Pivot pivot) { + final Min min = queryResult.getAggregations().get("timestamp-min"); + final Double from = min.getValue(); + final Max max = queryResult.getAggregations().get("timestamp-max"); + final Double to = max.getValue(); + final TimeRange pivotRange = query.effectiveTimeRange(pivot); + return AbsoluteRange.create( + isAllMessagesTimeRange(pivotRange) && from != 0 + ? new DateTime(from.longValue(), DateTimeZone.UTC) + : pivotRange.getFrom(), + isAllMessagesTimeRange(pivotRange) && to != 0 + ? 
new DateTime(to.longValue(), DateTimeZone.UTC) + : pivotRange.getTo() + ); + } + private boolean isAllMessagesTimeRange(TimeRange timeRange) { + return ALL_MESSAGES_TIMERANGE.equals(timeRange); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/InitialBucket.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/InitialBucket.java new file mode 100644 index 000000000000..d0825641df43 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/InitialBucket.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot; + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.Aggregations; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; + +import java.io.IOException; + +public class InitialBucket implements MultiBucketsAggregation.Bucket { + private final long docCount; + private final Aggregations aggregations; + + private InitialBucket(long docCount, Aggregations aggregations) { + this.docCount = docCount; + this.aggregations = aggregations; + } + + public static InitialBucket create(SearchResponse searchResponse) { + return new InitialBucket(searchResponse.getHits().getTotalHits().value, searchResponse.getAggregations()); + } + + @Override + public Object getKey() { + throw new IllegalStateException("Not implemented!"); + } + + @Override + public String getKeyAsString() { + throw new IllegalStateException("Not implemented!"); + } + + @Override + public long getDocCount() { + return this.docCount; + } + + @Override + public Aggregations getAggregations() { + return this.aggregations; + } + + @Override + public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException { + throw new IllegalStateException("Not implemented!"); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/PivotBucket.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/PivotBucket.java new file mode 100644 index 000000000000..bdecb73018ab --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/PivotBucket.java @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot; + +import com.google.common.collect.ImmutableList; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; + +public record PivotBucket(ImmutableList keys, MultiBucketsAggregation.Bucket bucket, boolean isMissingBucket) { + public static PivotBucket create(ImmutableList keys, MultiBucketsAggregation.Bucket bucket, boolean isMissingBucket) { + return new PivotBucket(keys, bucket, isMissingBucket); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/SeriesAggregationBuilder.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/SeriesAggregationBuilder.java new file mode 100644 index 000000000000..a11b2c55a16b --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/SeriesAggregationBuilder.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot; + +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilder; + +enum Placement { + ROOT, + ROW, + COLUMN, + METRIC +} + +public record SeriesAggregationBuilder(AggregationBuilder aggregationBuilder, Placement placement) { + public static SeriesAggregationBuilder root(AggregationBuilder aggregationBuilder) { + return new SeriesAggregationBuilder(aggregationBuilder, Placement.ROOT); + } + + public static SeriesAggregationBuilder metric(AggregationBuilder aggregationBuilder) { + return new SeriesAggregationBuilder(aggregationBuilder, Placement.METRIC); + } + + public static SeriesAggregationBuilder row(AggregationBuilder aggregationBuilder) { + return new SeriesAggregationBuilder(aggregationBuilder, Placement.ROW); + } + + public static SeriesAggregationBuilder column(AggregationBuilder aggregationBuilder) { + return new SeriesAggregationBuilder(aggregationBuilder, Placement.COLUMN); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/buckets/ESDateRangeHandler.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/buckets/ESDateRangeHandler.java new file mode 100644 index 000000000000..e7136a2cbedc --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/buckets/ESDateRangeHandler.java @@ -0,0 +1,94 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . 
+ */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot.buckets; + +import com.google.common.collect.ImmutableList; +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.DateRangeBucket; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.range.ParsedDateRange; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.ESPivotBucketSpecHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.PivotBucket; +import org.joda.time.base.AbstractDateTime; + +import javax.annotation.Nonnull; +import java.util.stream.Stream; + +public class ESDateRangeHandler extends ESPivotBucketSpecHandler { + private static final String AGG_NAME = "agg"; + @Nonnull + @Override + public CreatedAggregations doCreateAggregation(Direction direction, String name, Pivot pivot, DateRangeBucket dateRangeBucket, ESGeneratedQueryContext queryContext, Query query) { + AggregationBuilder root = null; + AggregationBuilder leaf = null; + for (String dateRangeField : dateRangeBucket.fields()) { + final DateRangeAggregationBuilder builder = AggregationBuilders.dateRange(name).field(dateRangeField); + dateRangeBucket.ranges().forEach(r -> { + final String from = r.from().map(AbstractDateTime::toString).orElse(null); + final String to = r.to().map(AbstractDateTime::toString).orElse(null); + if (from != null && to != null) { + builder.addRange(from, to); + } else if (to != null) { + builder.addUnboundedTo(to); + } else if (from != null) { + builder.addUnboundedFrom(from); + } + }); + builder.format("date_time"); + builder.keyed(false); + + record(queryContext, pivot, dateRangeBucket, name, ParsedDateRange.class); + + if (root == null && leaf == null) { + root = builder; + leaf = builder; + } else { + leaf.subAggregation(builder); + leaf = builder; + } + } + + return CreatedAggregations.create(root, leaf); + } + + @Override + public Stream extractBuckets(Pivot pivot, BucketSpec bucketSpec, PivotBucket initialBucket) { + final ImmutableList previousKeys = initialBucket.keys(); + final MultiBucketsAggregation.Bucket previousBucket = initialBucket.bucket(); + final ParsedDateRange aggregation = previousBucket.getAggregations().get(AGG_NAME); + final DateRangeBucket dateRangeBucket = (DateRangeBucket) bucketSpec; + + return aggregation.getBuckets().stream() + .flatMap(bucket -> { + final String bucketKey = dateRangeBucket.bucketKey().equals(DateRangeBucket.BucketKey.TO) + ? 
bucket.getToAsString() + : bucket.getFromAsString(); + final ImmutableList keys = ImmutableList.builder() + .addAll(previousKeys) + .add(bucketKey) + .build(); + + return Stream.of(PivotBucket.create(keys, bucket, false)); + }); + } +} diff --git a/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/buckets/ESTimeHandler.java b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/buckets/ESTimeHandler.java new file mode 100644 index 000000000000..7fad03f3a438 --- /dev/null +++ b/graylog-storage-elasticsearch7/src/main/java/org/graylog/storage/elasticsearch7/views/searchtypes/pivot/buckets/ESTimeHandler.java @@ -0,0 +1,124 @@ +/* + * Copyright (C) 2020 Graylog, Inc. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the Server Side Public License, version 1, + * as published by MongoDB, Inc. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * Server Side Public License for more details. + * + * You should have received a copy of the Server Side Public License + * along with this program. If not, see + * . + */ +package org.graylog.storage.elasticsearch7.views.searchtypes.pivot.buckets; + +import com.google.common.collect.ImmutableList; +import org.graylog.plugins.views.search.Query; +import org.graylog.plugins.views.search.searchtypes.pivot.BucketSpec; +import org.graylog.plugins.views.search.searchtypes.pivot.Pivot; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.AutoInterval; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Interval; +import org.graylog.plugins.views.search.searchtypes.pivot.buckets.Time; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.AggregationBuilders; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.BucketOrder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.graylog.shaded.elasticsearch7.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.graylog.storage.elasticsearch7.views.ESGeneratedQueryContext; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.ESPivotBucketSpecHandler; +import org.graylog.storage.elasticsearch7.views.searchtypes.pivot.PivotBucket; +import org.graylog2.plugin.indexer.searches.timeranges.RelativeRange; +import org.graylog2.plugin.indexer.searches.timeranges.TimeRange; + +import javax.annotation.Nonnull; +import java.util.stream.Stream; + +public class ESTimeHandler extends ESPivotBucketSpecHandler