-
Notifications
You must be signed in to change notification settings - Fork 25.6k
Support building Iron Bank Docker context #64336
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
e703b17
20e8e59
8a3f3bc
ff2ecd7
ad9140f
2171b24
1024710
4a87d27
e8c07b5
051daff
d544309
f275e30
82f8bda
cd9accf
01cf6cc
295e0f1
9363ecc
bbe471d
5971080
c905fc5
b3f0211
480c833
b1b074d
d446dce
40fd596
aa44b5d
3f7034b
b3b7028
b1a3644
eda4eed
7a8b2f3
2f297e4
b36f021
fbde6b1
ea8948e
fa55777
dd5ee8c
cfe7b4c
cfe941c
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -6,6 +6,9 @@ import org.elasticsearch.gradle.VersionProperties | |
| import org.elasticsearch.gradle.docker.DockerBuildTask | ||
| import org.elasticsearch.gradle.info.BuildParams | ||
| import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin | ||
|
|
||
| import java.nio.file.Path | ||
|
|
||
| apply plugin: 'elasticsearch.standalone-rest-test' | ||
| apply plugin: 'elasticsearch.test.fixtures' | ||
| apply plugin: 'elasticsearch.internal-distribution-download' | ||
|
|
@@ -46,6 +49,15 @@ ext.expansions = { Architecture architecture, boolean oss, DockerBase base, bool | |
|
|
||
| final String elasticsearch = "elasticsearch-${oss ? 'oss-' : ''}${VersionProperties.elasticsearch}-${classifier}.tar.gz" | ||
|
|
||
| String buildArgs = '#' | ||
| if (base == DockerBase.IRON_BANK) { | ||
| buildArgs = """ | ||
| ARG BASE_REGISTRY=nexus-docker-secure.levelup-nexus.svc.cluster.local:18082 | ||
| ARG BASE_IMAGE=redhat/ubi/ubi8 | ||
| ARG BASE_TAG=8.2 | ||
| """ | ||
| } | ||
|
|
||
| /* Both the following Dockerfile commands put the resulting artifact at | ||
| * the same location, regardless of classifier, so that the commands that | ||
| * follow in the Dockerfile don't have to know about the runtime | ||
|
|
@@ -61,58 +73,87 @@ RUN curl --retry 8 -S -L \\ | |
| """.trim() | ||
| } | ||
|
|
||
| def (major,minor) = VersionProperties.elasticsearch.split("\\.") | ||
|
|
||
| return [ | ||
| 'base_image' : base.getImage(), | ||
| 'bin_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'bin', | ||
| 'build_args' : buildArgs, | ||
| 'build_date' : BuildParams.buildDate, | ||
| 'config_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'config', | ||
| 'git_revision' : BuildParams.gitRevision, | ||
| 'license' : oss ? 'Apache-2.0' : 'Elastic-License', | ||
| 'package_manager' : base == DockerBase.UBI ? 'microdnf' : 'yum', | ||
| 'source_elasticsearch': sourceElasticsearch, | ||
| 'docker_base' : base.name().toLowerCase(), | ||
| 'version' : VersionProperties.elasticsearch | ||
| 'version' : VersionProperties.elasticsearch, | ||
| 'major_minor_version' : "${major}.${minor}" | ||
| ] | ||
| } | ||
|
|
||
| /** | ||
| * This filter squashes long runs of newlines so that the output | ||
| * is a little more aesthetically pleasing. | ||
| */ | ||
| class SquashNewlinesFilter extends FilterReader { | ||
| SquashNewlinesFilter(Reader input) { | ||
| super(new StringReader(input.text.replaceAll("\n{2,}", "\n\n"))) | ||
| } | ||
| } | ||
|
|
||
| private static String buildPath(Architecture architecture, boolean oss, DockerBase base) { | ||
| return 'build/' + | ||
| (architecture == Architecture.AARCH64 ? 'aarch64-' : '') + | ||
| (oss ? 'oss-' : '') + | ||
| (base == DockerBase.UBI ? 'ubi-' : '') + | ||
| (base == DockerBase.UBI ? 'ubi-' : (base == DockerBase.IRON_BANK ? 'ironbank-' : '')) + | ||
| 'docker' | ||
| } | ||
|
|
||
| private static String taskName(String prefix, Architecture architecture, boolean oss, DockerBase base, String suffix) { | ||
| return prefix + | ||
| (architecture == Architecture.AARCH64 ? 'Aarch64' : '') + | ||
| (oss ? 'Oss' : '') + | ||
| (base == DockerBase.UBI ? 'Ubi' : '') + | ||
| (base == DockerBase.UBI ? 'Ubi' : (base == DockerBase.IRON_BANK ? 'IronBank' : '')) + | ||
| suffix | ||
| } | ||
|
|
||
| project.ext { | ||
| dockerBuildContext = { Architecture architecture, boolean oss, DockerBase base, boolean local -> | ||
| copySpec { | ||
| into('bin') { | ||
| from project.projectDir.toPath().resolve("src/docker/bin") | ||
| } | ||
|
|
||
| into('config') { | ||
| /* | ||
| * The OSS and default distributions have different configurations, therefore we want to allow overriding the default configuration | ||
| * from files in the 'oss' sub-directory. We don't want the 'oss' sub-directory to appear in the final build context, however. | ||
| */ | ||
| duplicatesStrategy = DuplicatesStrategy.EXCLUDE | ||
| from(project.projectDir.toPath().resolve("src/docker/config")) { | ||
| exclude 'oss' | ||
| final Map<String,String> varExpansions = expansions(architecture, oss, base, local) | ||
| final Path projectDir = project.projectDir.toPath() | ||
|
|
||
| if (base == DockerBase.IRON_BANK) { | ||
| into('scripts') { | ||
| from projectDir.resolve("src/docker/bin") | ||
| from(projectDir.resolve("src/docker/config")) { | ||
| exclude '**/oss' | ||
| } | ||
| } | ||
| from(projectDir.resolve("src/docker/iron_bank")) { | ||
| expand(varExpansions) | ||
| } | ||
| if (oss) { | ||
| // Overlay the config file | ||
| from project.projectDir.toPath().resolve("src/docker/config/oss") | ||
| } else { | ||
| into('bin') { | ||
| from projectDir.resolve("src/docker/bin") | ||
| } | ||
|
|
||
| into('config') { | ||
| // The OSS and default distribution can have different configuration, therefore we want to | ||
| // allow overriding the default configuration by creating config files in oss or default | ||
| // build-context sub-modules. | ||
| duplicatesStrategy = DuplicatesStrategy.INCLUDE | ||
| from projectDir.resolve("src/docker/config") | ||
| if (oss) { | ||
| from projectDir.resolve("src/docker/config/oss") | ||
| } | ||
| } | ||
| } | ||
|
|
||
| from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) { | ||
| expand(expansions(architecture, oss, base, local)) | ||
| expand(varExpansions) | ||
| filter SquashNewlinesFilter | ||
| } | ||
| } | ||
| } | ||
|
|
@@ -324,6 +365,8 @@ subprojects { Project subProject -> | |
|
|
||
| final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64 | ||
| final boolean oss = subProject.name.contains('oss-') | ||
| // We can ignore Iron Bank at the moment as we don't | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Just so I understand, we are effectively not building the iron bank image here because we have not added a corresponding "export" project for it, yes?
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Yes, there's no export task, and there won't be without a way to automatically build the image. That would require simulating the Iron Bank image build process. |
||
| // build those images ourselves. | ||
| final DockerBase base = subProject.name.contains('ubi-') ? DockerBase.UBI : DockerBase.CENTOS | ||
|
|
||
| final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : '' | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,14 @@ | ||
| import org.elasticsearch.gradle.Architecture | ||
| import org.elasticsearch.gradle.DockerBase | ||
|
|
||
| apply plugin: 'base' | ||
|
|
||
| tasks.register("buildIronBankDockerBuildContext", Tar) { | ||
| archiveExtension = 'tar.gz' | ||
| compression = Compression.GZIP | ||
| archiveClassifier = "docker-build-context" | ||
| archiveBaseName = "elasticsearch-ironbank" | ||
| // We always treat Iron Bank builds as local, because that is how they | ||
| // are built | ||
| with dockerBuildContext(Architecture.X64, false, DockerBase.IRON_BANK, true) | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -3,6 +3,7 @@ | |
| # | ||
| # Beginning of multi stage Dockerfile | ||
| ################################################################################ | ||
|
|
||
| <% /* | ||
| This file is passed through Groovy's SimpleTemplateEngine, so dollars and backslashes | ||
| have to be escaped in order for them to appear in the final Dockerfile. You | ||
|
|
@@ -13,13 +14,16 @@ | |
| We use control-flow tags in this file to conditionally render the content. The | ||
| layout/presentation here has been adjusted so that it looks reasonable when rendered, | ||
| at the slight expense of how it looks here. | ||
|
|
||
| Note that this file is also filtered to squash together newlines, so we can | ||
| add as many newlines here as necessary to improve legibility. | ||
| */ %> | ||
|
|
||
| <% if (docker_base == "ubi") { %> | ||
| ################################################################################ | ||
| # Build stage 0 `builder`: | ||
| # Extract Elasticsearch artifact | ||
| ################################################################################ | ||
|
|
||
| FROM ${base_image} AS builder | ||
|
|
||
| # Install required packages to extract the Elasticsearch distribution | ||
|
|
@@ -44,7 +48,21 @@ RUN set -eux ; \\ | |
| rm \${tini_bin}.sha256sum ; \\ | ||
| mv \${tini_bin} /bin/tini ; \\ | ||
| chmod +x /bin/tini | ||
|
|
||
| <% } else if (docker_base == 'iron_bank') { %> | ||
| ${build_args} | ||
|
|
||
| FROM ${base_image} AS builder | ||
|
|
||
| # `tini` is a tiny but valid init for containers. This is used to cleanly | ||
| # control how ES and any child processes are shut down. | ||
| COPY tini /bin/tini | ||
| RUN chmod 0755 /bin/tini | ||
|
|
||
| <% } else { %> | ||
|
|
||
| <% /* CentOS builds are actually a custom base image with a minimal set of dependencies */ %> | ||
|
|
||
| ################################################################################ | ||
| # Stage 1. Build curl statically. Installing it from RPM on CentOS pulls in too | ||
| # many dependencies. | ||
|
|
@@ -194,6 +212,7 @@ COPY --from=curl /work/curl /rootfs/usr/bin/curl | |
| # Step 3. Fetch the Elasticsearch distribution and configure it for Docker | ||
| ################################################################################ | ||
| FROM ${base_image} AS builder | ||
|
|
||
| <% } %> | ||
|
|
||
| RUN mkdir /usr/share/elasticsearch | ||
|
|
@@ -202,16 +221,17 @@ WORKDIR /usr/share/elasticsearch | |
| # Fetch the appropriate Elasticsearch distribution for this architecture | ||
| ${source_elasticsearch} | ||
|
|
||
| RUN tar zxf /opt/elasticsearch.tar.gz --strip-components=1 | ||
| RUN tar -zxf /opt/elasticsearch.tar.gz --strip-components=1 | ||
|
|
||
| # Configure the distribution for Docker | ||
| RUN sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' /usr/share/elasticsearch/bin/elasticsearch-env | ||
| RUN mkdir -p config config/jvm.options.d data logs | ||
| RUN mkdir -p config config/jvm.options.d data logs plugins | ||
| RUN chmod 0775 config config/jvm.options.d data logs plugins | ||
| COPY config/elasticsearch.yml config/log4j2.properties config/ | ||
| COPY ${config_dir}/elasticsearch.yml ${config_dir}/log4j2.properties config/ | ||
| RUN chmod 0660 config/elasticsearch.yml config/log4j2.properties | ||
|
|
||
| <% if (docker_base == "ubi") { %> | ||
| <% if (docker_base == "ubi" || docker_base == "iron_bank") { %> | ||
|
|
||
| ################################################################################ | ||
| # Build stage 1 (the actual Elasticsearch image): | ||
| # | ||
|
|
@@ -221,6 +241,8 @@ RUN chmod 0660 config/elasticsearch.yml config/log4j2.properties | |
|
|
||
| FROM ${base_image} | ||
|
|
||
| <% if (docker_base == "ubi") { %> | ||
|
|
||
| RUN for iter in {1..10}; do \\ | ||
| ${package_manager} update --setopt=tsflags=nodocs -y && \\ | ||
| ${package_manager} install --setopt=tsflags=nodocs -y \\ | ||
|
|
@@ -231,11 +253,26 @@ RUN for iter in {1..10}; do \\ | |
| done; \\ | ||
| (exit \$exit_code) | ||
|
|
||
| %> } else { %> | ||
|
|
||
| <% | ||
| /* Reviews of the Iron Bank Dockerfile said that they preferred simpler */ | ||
| /* scripting so this version doesn't have the retry loop featured above. */ | ||
| %> | ||
| RUN ${package_manager} update --setopt=tsflags=nodocs -y && \\ | ||
| ${package_manager} install --setopt=tsflags=nodocs -y \\ | ||
| nc shadow-utils zip unzip && \\ | ||
| ${package_manager} clean all | ||
|
|
||
| <% } %> | ||
|
|
||
| RUN groupadd -g 1000 elasticsearch && \\ | ||
| adduser -u 1000 -g 1000 -G 0 -d /usr/share/elasticsearch elasticsearch && \\ | ||
| chmod 0775 /usr/share/elasticsearch && \\ | ||
| chown -R 1000:0 /usr/share/elasticsearch | ||
|
|
||
| <% } else { %> | ||
|
|
||
| ################################################################################ | ||
| # Stage 4. Build the final image, using the rootfs above as the basis, and | ||
| # copying in the Elasticsearch distribution | ||
|
|
@@ -250,13 +287,15 @@ RUN addgroup -g 1000 elasticsearch && \\ | |
| addgroup elasticsearch root && \\ | ||
| chmod 0775 /usr/share/elasticsearch && \\ | ||
| chgrp 0 /usr/share/elasticsearch | ||
|
|
||
| <% } %> | ||
|
|
||
| ENV ELASTIC_CONTAINER true | ||
|
|
||
| WORKDIR /usr/share/elasticsearch | ||
| COPY --from=builder --chown=1000:0 /usr/share/elasticsearch /usr/share/elasticsearch | ||
| <% if (docker_base == "ubi") { %> | ||
|
|
||
| <% if (docker_base == "ubi" || docker_base == "iron_bank") { %> | ||
| COPY --from=builder --chown=0:0 /bin/tini /bin/tini | ||
| <% } %> | ||
|
|
||
|
|
@@ -267,7 +306,7 @@ RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk | |
|
|
||
| ENV PATH /usr/share/elasticsearch/bin:\$PATH | ||
|
|
||
| COPY bin/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh | ||
| COPY ${bin_dir}/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh | ||
|
|
||
| # 1. The JDK's directories' permissions don't allow `java` to be executed under a different | ||
| # group to the default. Fix this. | ||
|
|
@@ -303,7 +342,8 @@ LABEL org.label-schema.build-date="${build_date}" \\ | |
| org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\ | ||
| org.opencontainers.image.vendor="Elastic" \\ | ||
| org.opencontainers.image.version="${version}" | ||
| <% if (docker_base == 'ubi') { %> | ||
|
|
||
| <% if (docker_base == 'ubi' || docker_base == 'iron_bank') { %> | ||
| LABEL name="Elasticsearch" \\ | ||
| maintainer="[email protected]" \\ | ||
| vendor="Elastic" \\ | ||
|
|
@@ -324,6 +364,10 @@ ENTRYPOINT ["/bin/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] | |
| # Dummy overridable parameter parsed by entrypoint | ||
| CMD ["eswrapper"] | ||
|
|
||
| <% if (docker_base == 'iron_bank') { %> | ||
| HEALTHCHECK --interval=10s --timeout=5s --start-period=1m --retries=5 CMD curl -I -f --max-time 5 http://localhost:9200 || exit 1 | ||
| <% } %> | ||
|
|
||
| ################################################################################ | ||
| # End of multi-stage Dockerfile | ||
| ################################################################################ | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,2 @@ | ||
| # Ignore any locally downloaded or dropped releases | ||
| *.tar.gz |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,2 @@ | ||
| @Library('DCCSCR@master') _ | ||
| dccscrPipeline(version: '${version}') |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,37 @@ | ||
| # Elasticsearch | ||
|
|
||
| **Elasticsearch** is a distributed, RESTful search and analytics engine capable of | ||
| solving a growing number of use cases. As the heart of the Elastic Stack, it | ||
| centrally stores your data so you can discover the expected and uncover the | ||
| unexpected. | ||
|
|
||
| For more information about Elasticsearch, please visit | ||
| https://www.elastic.co/products/elasticsearch. | ||
|
|
||
| ### Installation instructions | ||
|
|
||
| Please follow the documentation on [how to install Elasticsearch with Docker](https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html). | ||
|
|
||
| ### Where to file issues and PRs | ||
|
|
||
| - [Issues](https://github.com/elastic/elasticsearch/issues) | ||
| - [PRs](https://github.com/elastic/elasticsearch/pulls) | ||
|
|
||
| ### Where to get help | ||
|
|
||
| - [Elasticsearch Discuss Forums](https://discuss.elastic.co/c/elasticsearch) | ||
| - [Elasticsearch Documentation](https://www.elastic.co/guide/en/elasticsearch/reference/master/index.html) | ||
|
|
||
| ### Still need help? | ||
|
|
||
| You can learn more about the Elastic Community and also understand how to get more help | ||
| visiting [Elastic Community](https://www.elastic.co/community). | ||
|
|
||
|
|
||
| This software is governed by the [Elastic | ||
| License](https://github.com/elastic/elasticsearch/blob/${major_minor_version}/licenses/ELASTIC-LICENSE.txt), | ||
| and includes the full set of [free | ||
| features](https://www.elastic.co/subscriptions). | ||
|
|
||
| View the detailed release notes | ||
| [here](https://www.elastic.co/guide/en/elasticsearch/reference/${major_minor_version}/es-release-notes.html). |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,20 @@ | ||
| { | ||
| "resources": [ | ||
| { | ||
| "url": "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${version}-linux-x86_64.tar.gz", | ||
| "filename": "elasticsearch-${version}-linux-x86_64.tar.gz", | ||
| "validation": { | ||
| "type": "sha512", | ||
| "value": "<insert hash here>" | ||
| } | ||
| }, | ||
| { | ||
| "url": "https://github.com/krallin/tini/releases/download/v0.19.0/tini-amd64", | ||
| "filename": "tini", | ||
| "validation": { | ||
| "type": "sha256", | ||
| "value": "93dcc18adc78c65a028a84799ecf8ad40c936fdfc5f2a57b1acda5a8117fa82c" | ||
| } | ||
| } | ||
| ] | ||
| } |
Uh oh!
There was an error while loading. Please reload this page.