diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a3f4a1878e..3f40c80771 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -38,607 +38,54 @@ pool: stages: - - stage: CVE - displayName: 'CVE check' - dependsOn: [ ] - condition: and(succeeded(), ne(variables['Build.Reason'], 'Manual')) - jobs: - - job: - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - echo ">> ALL BUILD* VARIABLES " && export | grep BUILD - echo "[CVE] executing ROR_TASK=$ROR_TASK" - ci/run-pipeline.sh - continueOnError: true - env: - ROR_TASK: cve_check - - stage: TEST displayName: 'Run all tests' dependsOn: [ ] condition: and(succeeded(), ne(variables['Build.Reason'], 'Manual')) jobs: - - job: - timeoutInMinutes: 20 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - task: PublishTestResults@2 - condition: failed() - inputs: - testRunTitle: "$(ROR_TASK) results" - testResultsFiles: "**/TEST*xml" - mergeTestResults: true - strategy: - maxParallel: 99 - matrix: - LICENSE: - ROR_TASK: license - UNIT: - ROR_TASK: core_tests - - job: - condition: and(succeeded(), or(eq(variables.isEpic, true), eq(variables.isDevelop, true), eq(variables.isMaster, true))) - container: openjdk:22-slim - timeoutInMinutes: 120 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - task: PublishTestResults@2 - condition: failed() - inputs: - testRunTitle: "$(ROR_TASK) results" - testResultsFiles: "**/TEST*xml" - mergeTestResults: true - strategy: - maxParallel: 99 - matrix: - IT_es90x: - ROR_TASK: integration_es90x - - job: - condition: and(succeeded(), or(eq(variables.isEpic, true), eq(variables.isDevelop, true), eq(variables.isMaster, true))) - container: openjdk:22-slim - timeoutInMinutes: 120 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - task: PublishTestResults@2 - condition: failed() - inputs: - testRunTitle: "$(ROR_TASK) results" - testResultsFiles: "**/TEST*xml" - mergeTestResults: true - strategy: - maxParallel: 99 - matrix: - IT_es818x: - ROR_TASK: integration_es818x - IT_es816x: - ROR_TASK: integration_es816x - IT_es815x: - ROR_TASK: integration_es815x - IT_es814x: - ROR_TASK: integration_es814x - IT_es813x: - ROR_TASK: integration_es813x - IT_es812x: - ROR_TASK: integration_es812x - IT_es811x: - ROR_TASK: integration_es811x - IT_es810x: - ROR_TASK: integration_es810x - IT_es89x: - ROR_TASK: 
integration_es89x - IT_es88x: - ROR_TASK: integration_es88x - IT_es87x: - ROR_TASK: integration_es87x - IT_es85x: - ROR_TASK: integration_es85x - IT_es84x: - ROR_TASK: integration_es84x - IT_es83x: - ROR_TASK: integration_es83x - IT_es82x: - ROR_TASK: integration_es82x - IT_es81x: - ROR_TASK: integration_es81x - IT_es80x: - ROR_TASK: integration_es80x - job: condition: and(succeeded(), ne(variables.isEpic, true), ne(variables.isDevelop, true), ne(variables.isMaster, true)) - container: openjdk:22-slim timeoutInMinutes: 120 + pool: + vmImage: 'windows-2022' steps: - checkout: self fetchDepth: 1 clean: false persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - task: PublishTestResults@2 - condition: failed() + - task: JavaToolInstaller@0 + displayName: Switch to Java 21 inputs: - testRunTitle: "$(ROR_TASK) results" - testResultsFiles: "**/TEST*xml" - mergeTestResults: true - strategy: - maxParallel: 99 - matrix: - IT_es90x: - ROR_TASK: integration_es90x - - job: - condition: and(succeeded(), ne(variables.isEpic, true), ne(variables.isDevelop, true), ne(variables.isMaster, true)) - container: openjdk:22-slim - timeoutInMinutes: 120 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - task: PublishTestResults@2 - condition: failed() - inputs: - testRunTitle: "$(ROR_TASK) results" - testResultsFiles: "**/TEST*xml" - mergeTestResults: true - strategy: - maxParallel: 99 - matrix: - IT_es818x: - ROR_TASK: integration_es818x - IT_es810x: - ROR_TASK: integration_es810x - IT_es80x: - ROR_TASK: integration_es80x - - job: - condition: and(succeeded(), or(eq(variables.isEpic, true), eq(variables.isDevelop, true), eq(variables.isMaster, true))) - timeoutInMinutes: 120 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) + versionSpec: '21' + jdkArchitectureOption: 'x64' + jdkSourceOption: 'PreInstalled' + - powershell: | + Install-WindowsFeature -Name containers -IncludeAllSubFeature -IncludeManagementTools + Install-WindowsFeature -Name Hyper-V -IncludeAllSubFeature -IncludeManagementTools -Restart:$false + Set-ExecutionPolicy Bypass -Scope Process -Force + Install-Module -Name DockerMsftProvider -Repository PSGallery -Force + Install-Package -Name docker -ProviderName DockerMsftProvider -Force + Start-Service docker + docker version + displayName: 'Install Docker on Windows Server' + - powershell: | + docker ps + Write-Host ">>> $env:ES_MODULE => Running testcontainers.." 
+ .\gradlew ror-tools:test "-PesModule=$env:ES_MODULE" + if ($LASTEXITCODE -ne 0) { + Get-ChildItem -Recurse -Filter *hs_err* | ForEach-Object { + Get-Content $_.FullName + } + exit 1 + } - task: PublishTestResults@2 condition: failed() inputs: - testRunTitle: "$(ROR_TASK) results" + testRunTitle: "$(Windows ES_MODULE) results" testResultsFiles: "**/TEST*xml" mergeTestResults: true strategy: maxParallel: 99 matrix: - IT_es717x: - ROR_TASK: integration_es717x - IT_es716x: - ROR_TASK: integration_es716x - IT_es714x: - ROR_TASK: integration_es714x - IT_es711x: - ROR_TASK: integration_es711x - IT_es710x: - ROR_TASK: integration_es710x - IT_es79x: - ROR_TASK: integration_es79x - IT_es78x: - ROR_TASK: integration_es78x - IT_es77x: - ROR_TASK: integration_es77x - IT_es74x: - ROR_TASK: integration_es74x - IT_es73x: - ROR_TASK: integration_es73x - IT_es72x: - ROR_TASK: integration_es72x - IT_es70x: - ROR_TASK: integration_es70x - IT_es67x: - ROR_TASK: integration_es67x - - job: - condition: and(succeeded(), ne(variables.isEpic, true), ne(variables.isDevelop, true), ne(variables.isMaster, true)) - timeoutInMinutes: 120 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[TEST] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - task: PublishTestResults@2 - condition: failed() - inputs: - testRunTitle: "$(ROR_TASK) results" - testResultsFiles: "**/TEST*xml" - mergeTestResults: true - strategy: - maxParallel: 99 - matrix: - IT_es717x: - ROR_TASK: integration_es717x - IT_es710x: - ROR_TASK: integration_es710x - IT_es70x: - ROR_TASK: integration_es70x - IT_es67x: - ROR_TASK: integration_es67x - - - stage: BUILD_ROR - displayName: 'Build ROR plugins' - dependsOn: - - TEST - condition: | - and( - succeeded(), - succeeded('TEST'), - eq(variables.isPullRequest, true), - ne(variables['Build.Reason'], 'Manual') - ) - jobs: - - job: - timeoutInMinutes: 120 - steps: - - checkout: self - fetchDepth: 1 - clean: false - - - script: | - set -e - - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[BUILD_ROR] executing ROR_TASK = $ROR_TASK" - echo ">>> ($ROR_TASK) Building ROR plugins" && ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - strategy: - maxParallel: 99 - matrix: - BUILD_9xx: - ROR_TASK: build_es9xx - BUILD_8xx: - ROR_TASK: build_es8xx - BUILD_7xx: - ROR_TASK: build_es7xx - BUILD_6xx: - ROR_TASK: build_es6xx - - - stage: DETERMINE_CI_TYPE - displayName: 'Determine if this is release run' - dependsOn: - - TEST - condition: | - and( - succeeded(), - succeeded('TEST'), - or(eq(variables.isDevelop, true), eq(variables.isMaster, true)), - ne(variables['Build.Reason'], 'Manual') - ) - jobs: - - job: EXTRACT_IS_RELEASE - steps: - - bash: | - IS_RELEASE=true - if grep '^pluginVersion=' gradle.properties | awk -F= '{print $2}' | grep "\-pre"; then - IS_RELEASE=false - fi - echo "##vso[task.setvariable variable=value;isOutput=true]$IS_RELEASE" - name: IsRelease - - - stage: UPLOAD_PRE_ROR - displayName: 'Upload to S3 ROR plugin pre-builds' - dependsOn: - - DETERMINE_CI_TYPE - - TEST 
- condition: | - and( - succeeded(), - succeeded('TEST'), - or(eq(variables.isDevelop, true), eq(variables.isMaster, true)), - eq(dependencies.DETERMINE_CI_TYPE.outputs['EXTRACT_IS_RELEASE.IsRelease.value'], false), - ne(variables['Build.Reason'], 'Manual') - ) - jobs: - - job: - timeoutInMinutes: 600 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - - script: | - set -e - - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - echo "[UPLOAD_PRE_ROR] executing ROR_TASK = $ROR_TASK" - echo ">>> ($ROR_TASK) Uploading pre-ROR" && ci/run-pipeline.sh - timeoutInMinutes: 600 - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - - strategy: - maxParallel: 99 - matrix: - UPLOAD_PRE_9xx: - ROR_TASK: upload_pre_es9xx - UPLOAD_PRE_8xx: - ROR_TASK: upload_pre_es8xx - UPLOAD_PRE_7xx: - ROR_TASK: upload_pre_es7xx - UPLOAD_PRE_6xx: - ROR_TASK: upload_pre_es6xx - - - stage: RELEASE_ROR - displayName: 'Release ROR plugins' - dependsOn: - - DETERMINE_CI_TYPE - - TEST - condition: | - and( - succeeded(), - succeeded('TEST'), - or(eq(variables.isDevelop, true), eq(variables.isMaster, true)), - eq(dependencies.DETERMINE_CI_TYPE.outputs['EXTRACT_IS_RELEASE.IsRelease.value'], true), - ne(variables['Build.Reason'], 'Manual') - ) - jobs: - - job: - timeoutInMinutes: 180 - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - timeoutInMinutes: 180 - - - script: | - set -e - - echo ">>>> Installing dependencies with apt-get" - sudo apt-get update && sudo apt-get install -y git file - git status && echo ">>> Git installed correctly!" - - # Translate back env vars to avoid cyclical reference :/ - export aws_access_key_id=$var_aws_access_key_id - export aws_secret_access_key=$var_aws_secret_access_key - - export DOCKER=docker - if ! 
docker login -u $var_docker_registry_user -p $var_docker_registry_password; then - echo "Error: Failed to login to Docker registry" - exit 1 - fi - - echo "[RELEASE_ROR] executing ROR_TASK = $ROR_TASK" - echo ">>> ($ROR_TASK) Releasing ROR" && ci/run-pipeline.sh - env: - var_aws_access_key_id: $(aws_access_key_id) - var_aws_secret_access_key: $(aws_secret_access_key) - var_docker_registry_user: $(DOCKER_REGISTRY_USER) - var_docker_registry_password: $(DOCKER_REGISTRY_PASSWORD) - - strategy: - maxParallel: 99 - matrix: - RELEASE_ES_9xx: - ROR_TASK: release_es9xx - RELEASE_ES_8xx: - ROR_TASK: release_es8xx - RELEASE_ES_7xx: - ROR_TASK: release_es7xx - RELEASE_ES_6xx: - ROR_TASK: release_es6xx - - - stage: BUILD_MVN_ARTIFACTS - displayName: 'Build Maven artifacts' - dependsOn: - - TEST - condition: | - and( - succeeded(), - succeeded('TEST'), - ne(variables.isMaster, true), - ne(variables['Build.Reason'], 'Manual') - ) - jobs: - - job: - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - echo "[BUILD_MVN_ARTIFACTS] executing ROR_TASK = $ROR_TASK" - ci/run-pipeline.sh - env: - ROR_TASK: audit_compile - - - stage: PUBLISH_MVN_ARTIFACTS - displayName: 'Publish Maven artifacts' - dependsOn: - - DETERMINE_CI_TYPE - - TEST - condition: | - and( - succeeded(), - succeeded('TEST'), - eq(variables.isMaster, true), - eq(dependencies.DETERMINE_CI_TYPE.outputs['EXTRACT_IS_RELEASE.IsRelease.value'], true), - ne(variables['Build.Reason'], 'Manual') - ) - jobs: - - job: - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - # Populate the global variable mvn_status for later - - script: | - PLUGIN_VER=$(awk -F= '$1=="pluginVersion" {print $2}' gradle.properties) - URL="https://oss.sonatype.org/service/local/repositories/releases/content/tech/beshu/ror/audit_2.12/$PLUGIN_VER/" - echo "Maven artifact URL to check: $URL" - MVN_STATUS=$(curl --write-out '%{http_code}' --output /dev/null "$URL") - echo "##vso[task.setvariable variable=mvn_status]$MVN_STATUS" - - - script: | - echo ">> MVN_STATUS WAS 200. Artifact already present, no need to proceed further with Maven publishing." - condition: eq(200, variables.mvn_status) - - - script: | - echo ">> MVN_STATUS WAS 404. Artifact not present in Maven repository, proceeding with publishing." - condition: eq(404, variables.mvn_status) - - - task: DownloadSecureFile@1 - name: pgp - displayName: 'Download secret.pgp secret file' - inputs: - secureFile: 'secret.pgp' - - - script: | - echo Installing $(pgp.secureFilePath) to directory... 
- sudo chown root:root $(pgp.secureFilePath) - sudo chmod a+r $(pgp.secureFilePath) - mkdir .travis - sudo ln -s -t .travis/ $(pgp.secureFilePath) - echo "secret.pgp MD5SUSM `md5sum .travis/secret.pgp`" - condition: eq(404, variables.mvn_status) - - - script: | - echo "[MVN_PUBLISH] executing ROR_TASK=$ROR_TASK" - export MAVEN_REPO_PASSWORD=$VAR_MAVEN_REPO_PASSWORD - export MAVEN_REPO_USER=$VAR_MAVEN_REPO_USER - export MAVEN_STAGING_PROFILE_ID=$VAR_MAVEN_STAGING_PROFILE_ID - export GPG_KEY_ID=$VAR_GPG_KEY_ID - export GPG_PASSPHRASE=$VAR_GPG_PASSPHRASE - echo ">>> ($ROR_TASK) Publishing MVN artifacts" && ci/run-pipeline.sh - env: - ROR_TASK: publish_maven_artifacts - VAR_MAVEN_REPO_PASSWORD: $(MAVEN_REPO_PASSWORD) - VAR_MAVEN_REPO_USER: $(MAVEN_REPO_USER) - VAR_MAVEN_STAGING_PROFILE_ID: $(MAVEN_STAGING_PROFILE_ID) - VAR_GPG_PASSPHRASE: $(GPG_PASSPHRASE) - VAR_GPG_KEY_ID: $(GPG_KEY_ID) - condition: eq(404, variables.mvn_status) - - - stage: PRE_BUILDS_DOCKER_IMAGE_PUBLISHING - displayName: 'Publish docker images for specified pre-builds' - dependsOn: [ ] - condition: and(succeeded(), eq(variables['Build.Reason'], 'Manual')) - jobs: - - job: PUBLISH - displayName: 'Publishing' - timeoutInMinutes: "600" - steps: - - checkout: self - fetchDepth: 1 - clean: false - persistCredentials: true - - script: | - set -ex - - export DOCKER=docker - docker login -u $var_docker_registry_user -p $var_docker_registry_password - if ! docker login -u $var_docker_registry_user -p $var_docker_registry_password; then - echo "Error: Failed to login to Docker registry" - exit 1 - fi - - export BUILD_ROR_ES_VERSIONS="${{ parameters.preBuildVersionsForPublishingToDockerHub }}" - - if [ -z "$(echo "$BUILD_ROR_ES_VERSIONS" | tr -d '[:space:],')" ]; then - echo "Error: No ES versions specified for publishing ROR pre-builds" - exit 1 - fi - - echo "[RELEASE_ROR] executing ROR_TASK = $ROR_TASK" - echo ">>> ($ROR_TASK) Publish docker images for specified pre-builds" && ci/run-pipeline.sh - timeoutInMinutes: "600" - condition: succeeded() - env: - ROR_TASK: publish_pre_builds_docker_images - var_docker_registry_user: $(DOCKER_REGISTRY_USER) - var_docker_registry_password: $(DOCKER_REGISTRY_PASSWORD) + IT_es90x: + ES_MODULE: es90x \ No newline at end of file diff --git a/ror-tools/src/main/scala/tech/beshu/ror/tools/RorToolsApp.scala b/ror-tools/src/main/scala/tech/beshu/ror/tools/RorToolsApp.scala index a3711c8afe..588d1c2fad 100644 --- a/ror-tools/src/main/scala/tech/beshu/ror/tools/RorToolsApp.scala +++ b/ror-tools/src/main/scala/tech/beshu/ror/tools/RorToolsApp.scala @@ -227,7 +227,7 @@ trait RorTools { private lazy val esPathOption = opt[String]("es-path") - .text(s"Path to elasticsearch directory; default=${EsDirectory.defaultPath}") + .text(s"Path to elasticsearch directory; default=/usr/share/elasticsearch") .validate { path => Try(os.Path(path)) .toEither diff --git a/ror-tools/src/test/resources/logback.xml b/ror-tools/src/test/resources/logback.xml index 761252ff02..06693d99d2 100644 --- a/ror-tools/src/test/resources/logback.xml +++ b/ror-tools/src/test/resources/logback.xml @@ -6,8 +6,8 @@ - - + + diff --git a/ror-tools/src/test/scala/tech/beshu/ror/tools/RorToolsAppSuite.scala b/ror-tools/src/test/scala/tech/beshu/ror/tools/RorToolsAppSuite.scala index a04e075c56..57e17d6fd1 100644 --- a/ror-tools/src/test/scala/tech/beshu/ror/tools/RorToolsAppSuite.scala +++ b/ror-tools/src/test/scala/tech/beshu/ror/tools/RorToolsAppSuite.scala @@ -76,6 +76,7 @@ class RorToolsAppSuite |Patching ... |Elasticsearch is patched! 
ReadonlyREST is ready to use""" .stripMargin + .replace("\r\n", "\n") ) } "Patching successful for ES installation that was not patched (with consent given in arg in format with =)" in { @@ -89,6 +90,7 @@ class RorToolsAppSuite |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" .stripMargin + .replace("\r\n", "\n") ) } "Patching successful for ES installation that was not patched (with consent given in interactive mode)" in { @@ -103,7 +105,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Patching successful first time, on second try not started because already patched" in { @@ -116,7 +118,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) val (secondResult, secondOutput) = captureResultAndOutput { RorToolsTestApp.run(Array("patch", "--I_UNDERSTAND_AND_ACCEPT_ES_PATCHING", "yes", "--es-path", esLocalPath.toString))(_) @@ -125,7 +127,7 @@ class RorToolsAppSuite secondOutput should include( """Checking if Elasticsearch is patched ... |ERROR: Elasticsearch is already patched with current version""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) } "Patching not started when user declines to accept implications of patching (in arg)" in { @@ -136,7 +138,7 @@ class RorToolsAppSuite output should equal( """You have to confirm, that You understand the implications of ES patching in order to perform it. |You can read about patching in our documentation: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch. - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Patching not started when user declines to accept implications of patching (in interactive mode)" in { @@ -149,7 +151,7 @@ class RorToolsAppSuite """Elasticsearch needs to be patched to work with ReadonlyREST. You can read about patching in our documentation: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch. |Do you understand the implications of ES patching? (yes/no): You have to confirm, that You understand the implications of ES patching in order to perform it. |You can read about patching in our documentation: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch. - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Patching not started when --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING arg is not provided and console input is not possible" in { @@ -160,11 +162,11 @@ class RorToolsAppSuite result should equal(Result.Failure) output should equal( """|Elasticsearch needs to be patched to work with ReadonlyREST. You can read about patching in our documentation: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch. - |Do you understand the implications of ES patching? (yes/no):""".stripMargin + " " + + |Do you understand the implications of ES patching? (yes/no):""".stripMargin.replace("\r\n", "\n") + " " + """| |It seems that the answer was not given or the ror-tools are executed in the environment that does not support console input. |Consider using silent mode and provide the answer using the parameter --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING, read more in our documentation https://docs.readonlyrest.com/elasticsearch#id-5.-patch-elasticsearch. 
- |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Patching not started when --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING value is empty" in { @@ -175,11 +177,11 @@ class RorToolsAppSuite result should equal(Result.Failure) output should equal( """|Elasticsearch needs to be patched to work with ReadonlyREST. You can read about patching in our documentation: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch. - |Do you understand the implications of ES patching? (yes/no):""".stripMargin + " " + + |Do you understand the implications of ES patching? (yes/no):""".stripMargin.replace("\r\n", "\n") + " " + """| |It seems that the answer was not given or the ror-tools are executed in the environment that does not support console input. |Consider using silent mode and provide the answer using the parameter --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING, read more in our documentation https://docs.readonlyrest.com/elasticsearch#id-5.-patch-elasticsearch. - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Patching not started because of not existing directory" in { @@ -206,7 +208,8 @@ class RorToolsAppSuite |verify is a command that verifies if ES installation is patched | --es-path Path to elasticsearch directory; default=/usr/share/elasticsearch | - | -h, --help prints this usage text""".stripMargin + | -h, --help prints this usage text + |""".stripMargin.replace("\r\n", "\n") ) } "Patching not started because there is a metadata file indicating that the ES is already patched" in { @@ -223,7 +226,7 @@ class RorToolsAppSuite output should include( """Checking if Elasticsearch is patched ... |ERROR: Elasticsearch was patched using ROR 0.0.1 patcher. It should be unpatched using ROR 0.0.1 and patched again with current ROR patcher. ReadonlyREST cannot be started. For patching instructions see our docs: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Unpatching is not started when metadata file is missing" in { @@ -236,7 +239,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) @@ -251,8 +254,8 @@ class RorToolsAppSuite """Checking if Elasticsearch is patched ... |ERROR: Elasticsearch is either patched by an older version of ROR or corrupted. | - if ES has been patched using some older ROR version, then try unpatching using that older ROR version - | - otherwise the ES installation is corrupted and ES must be reinstalled""" - .stripMargin + | - otherwise the ES installation is corrupted and ES must be reinstalled + |""".stripMargin.replace("\r\n", "\n") ) } "Unpatching not started because ES is already patched by different version" in { @@ -269,7 +272,7 @@ class RorToolsAppSuite output should include( """Checking if Elasticsearch is patched ... |ERROR: Elasticsearch was patched using ROR 0.0.1 patcher. It should be unpatched using ROR 0.0.1 and patched again with current ROR patcher. ReadonlyREST cannot be started. For patching instructions see our docs: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Verify correctly recognizes that patch is not applied" in { @@ -281,7 +284,7 @@ class RorToolsAppSuite verifyOutput should include( """Checking if Elasticsearch is patched ... |Elasticsearch is NOT patched. ReadonlyREST cannot be used yet. 
For patching instructions see our docs: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) } "Verify detects patch when metadata file is present" in { @@ -295,7 +298,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) @@ -307,7 +310,7 @@ class RorToolsAppSuite verifyOutput should include( """Checking if Elasticsearch is patched ... |Elasticsearch is patched! ReadonlyREST can be used""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) } @@ -320,8 +323,8 @@ class RorToolsAppSuite """Checking if Elasticsearch is patched ... |Creating backup ... |Patching ... - |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + |Elasticsearch is patched! ReadonlyREST is ready to use + |""".stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) @@ -339,7 +342,7 @@ class RorToolsAppSuite | - otherwise the ES installation is corrupted and ES must be reinstalled |Problems: | - backup catalog is present, but there is no metadata file - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } @@ -353,7 +356,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) @@ -376,7 +379,7 @@ class RorToolsAppSuite | - file x-pack-core-$esVersionUsed.jar was patched by ROR ${metadata.rorVersion} | - file x-pack-ilm-$esVersionUsed.jar was patched by ROR ${metadata.rorVersion} | - file x-pack-security-$esVersionUsed.jar was patched by ROR ${metadata.rorVersion} - |""".stripMargin + |""".stripMargin.replace("\r\n", "\n") ) } "Successfully patch, verify and unpatch" in { @@ -391,7 +394,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) val hashAfterPatching = FileUtils.calculateHash(esLocalPath) @@ -404,7 +407,7 @@ class RorToolsAppSuite verifyOutput should include( """Checking if Elasticsearch is patched ... |Elasticsearch is patched! ReadonlyREST can be used""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) @@ -418,7 +421,7 @@ class RorToolsAppSuite """Checking if Elasticsearch is patched ... |Elasticsearch is currently patched, restoring ... |Elasticsearch is unpatched! ReadonlyREST can be removed now""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) val hashAfterUnpatching = FileUtils.calculateHash(esLocalPath) @@ -437,7 +440,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) // Verify @@ -449,12 +452,12 @@ class RorToolsAppSuite verifyOutput should include( """Checking if Elasticsearch is patched ... |Elasticsearch is patched! 
ReadonlyREST can be used""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) // Modify expected hash - simulate one of the files being modified - modifyMetadataFile {metadata => + modifyMetadataFile { metadata => val lastHash = metadata.patchedFilesMetadata.last val modifiedList = metadata.patchedFilesMetadata.init :+ lastHash.copy(hash = lastHash.hash + "abc") metadata.copy(patchedFilesMetadata = modifiedList) @@ -469,7 +472,7 @@ class RorToolsAppSuite unpatchOutput should include( """Checking if Elasticsearch is patched ... |ERROR: Elasticsearch was patched, but files""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) unpatchOutput should include("were modified after patching") } @@ -484,7 +487,7 @@ class RorToolsAppSuite |Creating backup ... |Patching ... |Elasticsearch is patched! ReadonlyREST is ready to use""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) // Verify @@ -496,7 +499,7 @@ class RorToolsAppSuite verifyOutput should include( """Checking if Elasticsearch is patched ... |Elasticsearch is patched! ReadonlyREST can be used""" - .stripMargin + .stripMargin.replace("\r\n", "\n") ) patchMetadataFile.exists() should be(true) @@ -519,7 +522,8 @@ class RorToolsAppSuite | 1. Unpatch the older ES version using ror-tools | 2. Upgrade to the newer ES version | 3. Patch ES after the upgrade using ror-tools - |For patching instructions see our docs: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch""".stripMargin + |For patching instructions see our docs: https://docs.readonlyrest.com/elasticsearch#id-3.-patch-elasticsearch + |""".stripMargin.replace("\r\n", "\n") ) } } @@ -549,13 +553,13 @@ class RorToolsAppSuite private def captureResultAndOutput(block: InOut => Result): (Result, String) = { val inOut = new CapturingOutputAndMockingInput() val result = block(inOut) - (result, inOut.getOutputBuffer) + (result, inOut.getOutputBuffer.replace("\r\n", "\n")) } private def captureResultAndOutputWithInteraction(block: InOut => Result, response: Option[String]): (Result, String) = { val inOut = new CapturingOutputAndMockingInput(response) val result = block(inOut) - (result, inOut.getOutputBuffer) + (result, inOut.getOutputBuffer.replace("\r\n", "\n")) } override protected def afterEach(): Unit = { @@ -591,4 +595,12 @@ class RorToolsAppSuite FileUtils.unTar(Path.of(s"$localPath/elasticsearch.tar"), Path.of(s"$esLocalPath")) } + def showSpecialChars(s: String): Unit = + println(s.flatMap { + case '\n' => "\\n" + case '\r' => "\\r" + case '\t' => "\\t" + case c => c.toString + }) + } diff --git a/ror-tools/src/test/scala/tech/beshu/ror/tools/utils/ExampleEsWithRorContainer.scala b/ror-tools/src/test/scala/tech/beshu/ror/tools/utils/ExampleEsWithRorContainer.scala index fdbb4d3113..ca1e9389e7 100644 --- a/ror-tools/src/test/scala/tech/beshu/ror/tools/utils/ExampleEsWithRorContainer.scala +++ b/ror-tools/src/test/scala/tech/beshu/ror/tools/utils/ExampleEsWithRorContainer.scala @@ -105,7 +105,7 @@ class ExampleEsWithRorContainer(implicit scheduler: Scheduler) extends EsContain ), initializer = nodeDataInitializer, startedClusterDependencies = startedClusterDependencies, - customEntrypoint = Some(Path("""/bin/sh -c "while true; do sleep 30; done"""")), + customEntrypoint = Some("""/bin/sh -c "while true; do sleep 30; done""""), awaitingReadyStrategy = AwaitingReadyStrategy.ImmediatelyTreatAsReady, ) } diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/EsContainerWithRorAndXpackSecurity.scala 
b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/EsContainerWithRorAndXpackSecurity.scala index 83000424be..2fc066558d 100644 --- a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/EsContainerWithRorAndXpackSecurity.scala +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/EsContainerWithRorAndXpackSecurity.scala @@ -18,7 +18,6 @@ package tech.beshu.ror.utils.containers import com.typesafe.scalalogging.StrictLogging import org.testcontainers.images.builder.ImageFromDockerfile -import os.Path import tech.beshu.ror.utils.containers.ElasticsearchNodeWaitingStrategy.AwaitingReadyStrategy import tech.beshu.ror.utils.containers.images.domain.Enabled import tech.beshu.ror.utils.containers.images.{DockerImageCreator, Elasticsearch, ReadonlyRestWithEnabledXpackSecurityPlugin} @@ -63,7 +62,7 @@ object EsContainerWithRorAndXpackSecurity extends StrictLogging { securityConfig: ReadonlyRestWithEnabledXpackSecurityPlugin.Config, initializer: ElasticsearchNodeDataInitializer, startedClusterDependencies: StartedClusterDependencies, - customEntrypoint: Option[Path], + customEntrypoint: Option[String], awaitingReadyStrategy: AwaitingReadyStrategy): EsContainer = { create( esVersion = esVersion, @@ -82,7 +81,7 @@ object EsContainerWithRorAndXpackSecurity extends StrictLogging { securityConfig: ReadonlyRestWithEnabledXpackSecurityPlugin.Config, initializer: ElasticsearchNodeDataInitializer, startedClusterDependencies: StartedClusterDependencies, - customEntrypoint: Option[Path], + customEntrypoint: Option[String], performPatching: Boolean, awaitingReadyStrategy: AwaitingReadyStrategy): EsContainer = { val rorContainer = new EsContainerWithRorAndXpackSecurity( @@ -101,7 +100,7 @@ object EsContainerWithRorAndXpackSecurity extends StrictLogging { private def esImageWithRorAndXpackFromDockerfile(esVersion: String, esConfig: Elasticsearch.Config, securityConfig: ReadonlyRestWithEnabledXpackSecurityPlugin.Config, - customEntrypoint: Option[Path], + customEntrypoint: Option[String], performPatching: Boolean) = { DockerImageCreator.create( Elasticsearch.create(esVersion, esConfig) diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageCreator.scala b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageCreator.scala index 8e6ab0da5d..f8c69a6113 100644 --- a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageCreator.scala +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageCreator.scala @@ -21,6 +21,7 @@ import org.testcontainers.images.builder.ImageFromDockerfile import org.testcontainers.images.builder.dockerfile.DockerfileBuilder import tech.beshu.ror.utils.containers.images.DockerImageDescription.Command import tech.beshu.ror.utils.containers.images.DockerImageDescription.Command.{ChangeUser, Run} +import tech.beshu.ror.utils.containers.images.PathUtils.linuxPath object DockerImageCreator extends StrictLogging { @@ -43,7 +44,7 @@ object DockerImageCreator extends StrictLogging { .copyFiles .foldLeft(to) { case (dockerfile, copyFile) => - dockerfile.withFileFromFile(copyFile.destination.toIO.getAbsolutePath, copyFile.file.toJava) + dockerfile.withFileFromFile(linuxPath(copyFile.destination.toIO.getAbsolutePath), copyFile.file.toJava) } } @@ -71,7 +72,7 @@ object DockerImageCreator extends StrictLogging { imageDescription .copyFiles .foldLeft(builder) { case (b, file) => - b.copy(file.destination.toString(), file.destination.toString()) + 
b.copy(linuxPath(file.destination.toString()), linuxPath(file.destination.toString())) } } @@ -84,7 +85,7 @@ object DockerImageCreator extends StrictLogging { def setEntrypointFrom(imageDescription: DockerImageDescription): DockerfileBuilder = { imageDescription .entrypoint - .foldLeft(builder) { case (b, entrypoint) => b.entryPoint(entrypoint.toIO.getAbsolutePath) } + .foldLeft(builder) { case (b, entrypoint) => b.entryPoint(entrypoint) } } } diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageDescription.scala b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageDescription.scala index 3164e39158..a24317deb6 100644 --- a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageDescription.scala +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/DockerImageDescription.scala @@ -24,7 +24,7 @@ final case class DockerImageDescription(baseImage: String, runCommands: Seq[Command], copyFiles: Set[CopyFile], envs: Set[Env], - entrypoint: Option[Path]) { + entrypoint: Option[String]) { def run(command: String): DockerImageDescription = { this.copy(runCommands = this.runCommands :+ Command.Run(command)) @@ -61,7 +61,7 @@ final case class DockerImageDescription(baseImage: String, this.copy(envs = this.envs ++ envs.map { case (k, v) => Env(k, v) }) } - def setEntrypoint(entrypoint: Path): DockerImageDescription = { + def setEntrypoint(entrypoint: String): DockerImageDescription = { this.copy(entrypoint = Some(entrypoint)) } } @@ -75,7 +75,7 @@ object DockerImageDescription { final case class CopyFile(destination: Path, file: File) final case class Env(name: String, value: String) - def create(image: String, customEntrypoint: Option[Path] = None): DockerImageDescription = DockerImageDescription( + def create(image: String, customEntrypoint: Option[String] = None): DockerImageDescription = DockerImageDescription( baseImage = image, runCommands = Seq.empty, copyFiles = Set.empty, diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/Elasticsearch.scala b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/Elasticsearch.scala index b9363f58a3..1c70c0b3b2 100644 --- a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/Elasticsearch.scala +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/Elasticsearch.scala @@ -22,6 +22,7 @@ import com.typesafe.scalalogging.LazyLogging import os.Path import tech.beshu.ror.utils.containers.ContainerUtils import tech.beshu.ror.utils.containers.images.Elasticsearch.* +import tech.beshu.ror.utils.containers.images.PathUtils.linuxPath import tech.beshu.ror.utils.misc.Version object Elasticsearch { @@ -54,7 +55,7 @@ object Elasticsearch { class Elasticsearch(esVersion: String, config: Config, plugins: Seq[Plugin], - customEntrypoint: Option[Path]) + customEntrypoint: Option[String]) extends LazyLogging { def this(esVersion: String, config: Config) = { @@ -72,7 +73,7 @@ class Elasticsearch(esVersion: String, new Elasticsearch(esVersion, config, plugins :+ plugin, customEntrypoint) } - def setEntrypoint(entrypoint: Path): Elasticsearch = { + def setEntrypoint(entrypoint: String): Elasticsearch = { new Elasticsearch(esVersion, config, plugins, Some(entrypoint)) } @@ -91,7 +92,7 @@ class Elasticsearch(esVersion: String, // Package tar is required by the RorToolsAppSuite, and the ES >= 9.x is based on // Red Hat Universal Base Image 9 Minimal, which does not contain it. 
.runWhen(Version.greaterOrEqualThan(esVersion, 9, 0, 0), "microdnf install -y tar") - .run(s"chown -R elasticsearch:elasticsearch ${configDir.toString()}") + .run(s"chown -R elasticsearch:elasticsearch ${linuxPath(configDir.toString())}") .addEnvs(config.envs + ("ES_JAVA_OPTS" -> javaOptsBasedOn(withEsJavaOptsBuilderFromPlugins))) .installPlugins() .user("elasticsearch") diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/PathUtils.scala b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/PathUtils.scala new file mode 100644 index 0000000000..dfdb06f2d9 --- /dev/null +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/PathUtils.scala @@ -0,0 +1,31 @@ +/* + * This file is part of ReadonlyREST. + * + * ReadonlyREST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ReadonlyREST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with ReadonlyREST. If not, see http://www.gnu.org/licenses/ + */ +package tech.beshu.ror.utils.containers.images + +import os.Path + +object PathUtils { + + def linuxPath(path: Path): String = { + linuxPath(path.toString) + } + + def linuxPath(str: String): String = { + str.replace("C:", "").replace("\\", "/") + } + +} diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/ReadonlyRestPlugin.scala b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/ReadonlyRestPlugin.scala index d9cd584b5f..aee58397ae 100644 --- a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/ReadonlyRestPlugin.scala +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/ReadonlyRestPlugin.scala @@ -18,6 +18,7 @@ package tech.beshu.ror.utils.containers.images import better.files.* import tech.beshu.ror.utils.containers.images.Elasticsearch.{configDir, esDir, fromResourceBy} +import tech.beshu.ror.utils.containers.images.PathUtils.linuxPath import tech.beshu.ror.utils.containers.images.ReadonlyRestPlugin.Config import tech.beshu.ror.utils.containers.images.ReadonlyRestPlugin.Config.{Attributes, InternodeSsl, RestSsl} import tech.beshu.ror.utils.containers.images.domain.{Enabled, SourceFile} @@ -106,17 +107,17 @@ class ReadonlyRestPlugin(esVersion: String, private implicit class InstallRorPlugin(val image: DockerImageDescription) { def installRorPlugin(): DockerImageDescription = { image - .run(s"${esDir.toString()}/bin/elasticsearch-plugin install --batch file:///tmp/${config.rorPlugin.name}") + .run(s"${linuxPath(esDir)}/bin/elasticsearch-plugin install --batch file:///tmp/${config.rorPlugin.name}") } def patchES(): DockerImageDescription = { image .user("root") .runWhen(Version.greaterOrEqualThan(esVersion, 7, 0, 0), - command = s"${esDir.toString()}/jdk/bin/java -jar ${esDir.toString()}/plugins/readonlyrest/ror-tools.jar patch --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING=yes" + command = s"${linuxPath(esDir)}/jdk/bin/java -jar ${linuxPath(esDir)}/plugins/readonlyrest/ror-tools.jar patch --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING=yes" ) .runWhen(Version.greaterOrEqualThan(esVersion, 6, 5, 0) && Version.lowerThan(esVersion, 
7, 0, 0), - command = s"$$JAVA_HOME/bin/java -jar ${esDir.toString()}/plugins/readonlyrest/ror-tools.jar patch --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING=yes" + command = s"$$JAVA_HOME/bin/java -jar ${linuxPath(esDir)}/plugins/readonlyrest/ror-tools.jar patch --I_UNDERSTAND_AND_ACCEPT_ES_PATCHING=yes" ) .user("elasticsearch") } @@ -131,7 +132,7 @@ class ReadonlyRestPlugin(esVersion: String, .copyFile(configDir / "ror-truststore.bcfks", fromResourceBy(name = "ror-truststore.bcfks")) .copyFile(configDir / "elastic-certificates.bcfks", fromResourceBy(name = "elastic-certificates.bcfks")) .runWhen(Version.greaterOrEqualThan(esVersion, 7, 10, 0), - s"cat ${configDir.toString()}/additional-permissions.policy >> ${esDir.toString()}/jdk/conf/security/java.policy" + s"cat ${linuxPath(configDir)}/additional-permissions.policy >> ${linuxPath(esDir)}/jdk/conf/security/java.policy" ) } else { diff --git a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/XpackSecurityPlugin.scala b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/XpackSecurityPlugin.scala index 508a7ad253..7e35e1b34c 100644 --- a/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/XpackSecurityPlugin.scala +++ b/tests-utils/src/main/scala/tech/beshu/ror/utils/containers/images/XpackSecurityPlugin.scala @@ -17,6 +17,7 @@ package tech.beshu.ror.utils.containers.images import tech.beshu.ror.utils.containers.images.Elasticsearch.{configDir, esDir, fromResourceBy} +import tech.beshu.ror.utils.containers.images.PathUtils.linuxPath import tech.beshu.ror.utils.containers.images.XpackSecurityPlugin.Config import tech.beshu.ror.utils.containers.images.XpackSecurityPlugin.Config.Attributes import tech.beshu.ror.utils.misc.Version @@ -116,10 +117,10 @@ class XpackSecurityPlugin(esVersion: String, ) } - private def createKeystoreCommand = s"${esDir.toString()}/bin/elasticsearch-keystore create" + private def createKeystoreCommand = s"${linuxPath(esDir)}/bin/elasticsearch-keystore create" private def addToKeystoreCommand(key: String, value: String) = { - s"printf '$value\\n' | ${esDir.toString()}/bin/elasticsearch-keystore add $key" + s"printf '$value\\n' | ${linuxPath(esDir)}/bin/elasticsearch-keystore add $key" } } }
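Context for the new PathUtils.linuxPath helper used above: when the ror-tools tests run on a Windows agent, paths produced by os-lib (for example the esDir and configDir values) render with a drive letter and backslashes, which cannot be embedded verbatim in Dockerfile instructions targeting the Linux-based ES images assembled by DockerImageCreator. A rough usage sketch under that assumption; the sample path and object name are illustrative only:

import tech.beshu.ror.utils.containers.images.PathUtils.linuxPath

object LinuxPathExample extends App {
  // A Windows-rendered path like this one would be meaningless inside a
  // Dockerfile RUN or COPY instruction for a Linux image.
  val windowsStylePath = "C:\\usr\\share\\elasticsearch\\config"

  // linuxPath drops the drive prefix and flips the separators,
  // so the same expression works on both Linux and Windows hosts.
  println(linuxPath(windowsStylePath)) // prints: /usr/share/elasticsearch/config
}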
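Note on the CRLF handling in the RorToolsAppSuite hunks above: every expected multi-line literal now gets .replace("\r\n", "\n"), and captureResultAndOutput normalizes the captured buffer the same way, so the assertions behave identically on the new windows-2022 agents and on Linux. A minimal sketch of how that recurring normalization could be kept in one place; the object and method names are illustrative suggestions, not part of this change set:

object OutputNormalization {
  // Collapse Windows CRLF line endings to LF so that expectations written
  // with stripMargin compare equally regardless of the agent's platform.
  def normalized(s: String): String = s.replace("\r\n", "\n")
}

// Illustrative use inside an assertion (names follow the suite above):
//   normalized(output) should include(normalized(expectedMessage))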