Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Removes checked-in js zip #403

Merged
merged 3 commits
Jul 9, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/multi-node-test-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
- name: Checkout Branch
uses: actions/checkout@v2
- name: Run integration tests with multi node config
run: ./gradlew integTest -PnumNodes=3 -Dopensearch.version=1.3.0-SNAPSHOT
run: ./gradlew integTest -PnumNodes=3
- name: Upload failed logs
uses: actions/upload-artifact@v2
if: failure()
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-and-build-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ jobs:
# Job name
name: Build Index Management
env:
BUILD_ARGS: -D"opensearch.version=1.3.0-SNAPSHOT" ${{ matrix.os_build_args }}
BUILD_ARGS: ${{ matrix.os_build_args }}
WORKING_DIR: ${{ matrix.working_directory }}.
strategy:
# This setting says that all jobs should finish, even if one fails
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,5 @@ out/
*.log
http
.project
.settings
.settings
src/test/resources/job-scheduler/
46 changes: 37 additions & 9 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,19 @@ import java.util.function.Predicate

buildscript {
ext {
opensearch_version = System.getProperty("opensearch.version", "1.3.0-SNAPSHOT")
opensearch_version = System.getProperty("opensearch.version", "1.3.3-SNAPSHOT")
// 1.1.0 -> 1.1.0.0, and 1.1.0-SNAPSHOT -> 1.1.0.0-SNAPSHOT
opensearch_build = opensearch_version.replaceAll(/(\.\d)([^\d]*)$/, '$1.0$2')
notification_version = System.getProperty("notification.version", opensearch_build)
common_utils_version = System.getProperty("common_utils.version", opensearch_build)
job_scheduler_version = System.getProperty("job_scheduler_version.version", opensearch_build)
kotlin_version = System.getProperty("kotlin.version", "1.4.0")

opensearch_no_snapshot = opensearch_version.replace("-SNAPSHOT","")
job_scheduler_no_snapshot = job_scheduler_version.replace("-SNAPSHOT","")
job_scheduler_resource_folder = "src/test/resources/job-scheduler"
job_scheduler_build_download = 'https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/' + opensearch_no_snapshot +
'/latest/linux/x64/tar/builds/opensearch/plugins/opensearch-job-scheduler-' + job_scheduler_no_snapshot + '.zip'
}

repositories {
Expand Down Expand Up @@ -228,6 +234,14 @@ test {
systemProperty 'tests.security.manager', 'false'
}

// Downloads a plugin artifact from the given URL into the given folder and
// returns the downloaded file. Lets test dependencies (e.g. the job-scheduler
// zip) be fetched at build time instead of being checked into the repository.
//   download_to_folder: destination directory (created if missing)
//   download_from_src:  URL of the artifact to fetch
ext.getPluginResource = { download_to_folder, download_from_src ->
project.mkdir download_to_folder
// httpusecaches: false forces a fresh download rather than an HTTP-cached copy
ant.get(src: download_from_src,
dest: download_to_folder,
httpusecaches: false)
// Exactly one file is expected in the folder; getSingleFile() fails otherwise
return fileTree(download_to_folder).getSingleFile()
}

File repo = file("$buildDir/testclusters/repo")
def _numNodes = findProperty('numNodes') as Integer ?: 1
testClusters.integTest {
Expand All @@ -245,10 +259,23 @@ testClusters.integTest {
debugPort += 1
}
}
plugin(provider({
new RegularFile() {
@Override
File getAsFile() { fileTree("src/test/resources/job-scheduler").getSingleFile() }

// Installs the job-scheduler plugin into the integTest cluster. The zip is
// downloaded lazily (when the provider is resolved) rather than checked in.
// Delegates the mkdir + ant.get + single-file lookup to the shared
// getPluginResource helper defined above, so the download logic lives in one
// place; only the "clear any stale copy first" step is specific to this call.
plugin(provider(new Callable<RegularFile>(){
    @Override
    RegularFile call() throws Exception {
        return new RegularFile() {
            @Override
            File getAsFile() {
                // Remove a previously downloaded zip so a version bump cannot
                // leave two files in the folder (getSingleFile would then fail).
                if (new File("$project.rootDir/$job_scheduler_resource_folder").exists()) {
                    project.delete(files("$project.rootDir/$job_scheduler_resource_folder"))
                }
                return getPluginResource(job_scheduler_resource_folder, job_scheduler_build_download)
            }
        }
    }
}))

Expand Down Expand Up @@ -346,7 +373,7 @@ String bwcFilePath = "src/test/resources/bwc/"
testClusters {
"${baseName}$i" {
testDistribution = "ARCHIVE"
versions = ["7.10.2", "1.3.0-SNAPSHOT"]
versions = ["7.10.2", opensearch_version]
numberOfNodes = 3
plugin(provider(new Callable<RegularFile>(){
@Override
Expand Down Expand Up @@ -384,14 +411,16 @@ List<Provider<RegularFile>> plugins = []
task prepareBwcTests {
dependsOn bundlePlugin
doLast {
// Download the job scheduler test dependency
getPluginResource(job_scheduler_resource_folder, job_scheduler_build_download)
plugins = [
provider(new Callable<RegularFile>(){
@Override
RegularFile call() throws Exception {
return new RegularFile() {
@Override
File getAsFile() {
return fileTree("src/test/resources/job-scheduler").getSingleFile()
return fileTree(job_scheduler_resource_folder).getSingleFile()
}
}
}
Expand Down Expand Up @@ -539,7 +568,7 @@ testClusters.mixedCluster {
// Installs the previously downloaded job-scheduler zip on this mixed-cluster
// node. NOTE(review): assumes the zip was already fetched into
// job_scheduler_resource_folder (e.g. by prepareBwcTests / getPluginResource)
// before this provider resolves — getSingleFile() fails on an empty folder.
node.plugin(provider({
new RegularFile() {
@Override
File getAsFile() { fileTree(job_scheduler_resource_folder).getSingleFile() }
}
}))
}))

Expand All @@ -553,7 +582,6 @@ testClusters.mixedCluster {
} else {
node.plugin(project.tasks.bundlePlugin.archiveFile)
}
node.plugins.each { println("plugin in the node: ${it.get()}") }
}
setting 'path.repo', repo.absolutePath
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -297,8 +297,8 @@ class ManagedIndexCoordinator(
val managedIndices = getManagedIndices(event.indicesDeleted().map { it.uuid })
val deletedIndices = event.indicesDeleted().map { it.name }
val allIndicesUuid = indexMetadataProvider.getMultiTypeISMIndexMetadata(indexNames = deletedIndices).map { (_, metadataMapForType) ->
metadataMapForType.values
}
metadataMapForType.values.map { it.indexUuid }
}.flatten().toSet()
// Check if the deleted index uuid is still part of any metadata service in the cluster and has an existing managed index job
indicesToClean = event.indicesDeleted().filter { it.uuid in managedIndices.keys && !allIndicesUuid.contains(it.uuid) }
removeManagedIndexReq = indicesToClean.map { deleteManagedIndexRequest(it.uuid) }
Expand Down
Binary file not shown.