Commit

Merge branch 'main' into fix-yamlt-est
# Conflicts:
#	muted-tests.yml
nielsbauman committed Sep 7, 2024
2 parents 0b1c8b5 + daf35f3 commit b9b0e5c
Showing 49 changed files with 766 additions and 147 deletions.
@@ -12,27 +12,40 @@
* This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
*/
public enum DockerBase {
DEFAULT("ubuntu:20.04", ""),
DEFAULT("ubuntu:20.04", "", "apt-get"),

// "latest" here is intentional, since the image name specifies "8"
UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi8"),
UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi8", "microdnf"),

// The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build
IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank"),
IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"),

// Base image with extras for Cloud
CLOUD("ubuntu:20.04", "-cloud"),
CLOUD("ubuntu:20.04", "-cloud", "apt-get"),

// Based on CLOUD above, with more extras. We don't set a base image because
// we programmatically extend from the Cloud image.
CLOUD_ESS(null, "-cloud-ess");
CLOUD_ESS(null, "-cloud-ess", "apt-get"),

// Chainguard based wolfi image with latest jdk
WOLFI(
"docker.elastic.co/wolfi/chainguard-base:latest@sha256:c16d3ad6cebf387e8dd2ad769f54320c4819fbbaa21e729fad087c7ae223b4d0",
"wolfi",
"apk"
);

private final String image;
private final String suffix;
private final String packageManager;

DockerBase(String image, String suffix) {
this(image, suffix, "apt-get");
}

DockerBase(String image, String suffix, String packageManager) {
this.image = image;
this.suffix = suffix;
this.packageManager = packageManager;
}

public String getImage() {
@@ -42,4 +55,8 @@ public String getImage() {
public String getSuffix() {
return suffix;
}

public String getPackageManager() {
return packageManager;
}
}
@@ -177,6 +177,9 @@ private static String distributionProjectName(ElasticsearchDistribution distribu
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS) {
return projectName + "cloud-ess-docker" + archString + "-export";
}
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI) {
return projectName + "wolfi-docker" + archString + "-export";
}
return projectName + distribution.getType().getName();
}

@@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.gradle.internal.distribution;

import org.elasticsearch.gradle.ElasticsearchDistributionType;

public class DockerWolfiElasticsearchDistributionType implements ElasticsearchDistributionType {

DockerWolfiElasticsearchDistributionType() {}

@Override
public String getName() {
return "dockerWolfi";
}

@Override
public boolean isDocker() {
return true;
}
}
@@ -20,6 +20,7 @@ public class InternalElasticsearchDistributionTypes {
public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_CLOUD = new DockerCloudElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();

public static List<ElasticsearchDistributionType> ALL_INTERNAL = List.of(
DEB,
@@ -28,6 +29,7 @@ public class InternalElasticsearchDistributionTypes {
DOCKER_UBI,
DOCKER_IRONBANK,
DOCKER_CLOUD,
DOCKER_CLOUD_ESS
DOCKER_CLOUD_ESS,
DOCKER_WOLFI
);
}
@@ -52,6 +52,7 @@
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM;

/**
@@ -93,6 +94,7 @@ public void apply(Project project) {

for (ElasticsearchDistribution distribution : testDistributions) {
String taskname = destructiveDistroTestTaskName(distribution);
ElasticsearchDistributionType type = distribution.getType();
TaskProvider<Test> destructiveTask = configureTestTask(project, taskname, distribution, t -> {
t.onlyIf(
"Docker is not available",
@@ -106,12 +108,13 @@ public void apply(Project project) {
if (distribution.getPlatform() == Platform.WINDOWS) {
windowsTestTasks.add(destructiveTask);
} else {
linuxTestTasks.computeIfAbsent(distribution.getType(), k -> new ArrayList<>()).add(destructiveTask);
linuxTestTasks.computeIfAbsent(type, k -> new ArrayList<>()).add(destructiveTask);
}
destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask));
lifecycleTasks.get(distribution.getType()).configure(t -> t.dependsOn(destructiveTask));
TaskProvider<?> lifecycleTask = lifecycleTasks.get(type);
lifecycleTask.configure(t -> t.dependsOn(destructiveTask));

if ((distribution.getType() == DEB || distribution.getType() == RPM) && distribution.getBundledJdk()) {
if ((type == DEB || type == RPM) && distribution.getBundledJdk()) {
for (Version version : BuildParams.getBwcVersions().getIndexCompatible()) {
final ElasticsearchDistribution bwcDistro;
if (version.equals(Version.fromString(distribution.getVersion()))) {
@@ -121,7 +124,7 @@ public void apply(Project project) {
bwcDistro = createDistro(
allDistributions,
distribution.getArchitecture(),
distribution.getType(),
type,
distribution.getPlatform(),
distribution.getBundledJdk(),
version.toString()
@@ -147,6 +150,7 @@ private static Map<ElasticsearchDistributionType, TaskProvider<?>> lifecycleTask
lifecyleTasks.put(DOCKER_IRONBANK, project.getTasks().register(taskPrefix + ".docker-ironbank"));
lifecyleTasks.put(DOCKER_CLOUD, project.getTasks().register(taskPrefix + ".docker-cloud"));
lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess"));
lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi"));
lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives"));
lifecyleTasks.put(DEB, project.getTasks().register(taskPrefix + ".packages"));
lifecyleTasks.put(RPM, lifecyleTasks.get(DEB));
2 changes: 2 additions & 0 deletions distribution/docker/README.md
@@ -6,11 +6,13 @@ the [DockerBase] enum.
* Default - this is what most people use, and is based on Ubuntu
* UBI - the same as the default image, but based upon [RedHat's UBI
images][ubi], specifically their minimal flavour.
* Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev)
* Iron Bank - this is the US Department of Defence's repository of digitally
signed, binary container images including both Free and Open-Source
software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is
another UBI build, this time on the regular UBI image, with extra
hardening. See below for more details.

* Cloud - this is mostly the same as the default image, with some notable differences:
* `filebeat` and `metricbeat` are included
* `wget` is included
13 changes: 1 addition & 12 deletions distribution/docker/build.gradle
@@ -21,8 +21,6 @@ apply plugin: 'elasticsearch.dra-artifacts'
String buildId = providers.systemProperty('build.id').getOrNull()
boolean useLocalArtifacts = buildId != null && buildId.isBlank() == false && useDra == false



repositories {
// Define a repository that allows Gradle to fetch a resource from GitHub. This
// is only used to fetch the `tini` binary, when building the Iron Bank docker image
@@ -131,7 +129,7 @@ ext.expansions = { Architecture architecture, DockerBase base ->
'config_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'config',
'git_revision' : BuildParams.gitRevision,
'license' : base == DockerBase.IRON_BANK ? 'Elastic License 2.0' : 'Elastic-License-2.0',
'package_manager' : base == DockerBase.IRON_BANK ? 'yum' : (base == DockerBase.UBI ? 'microdnf' : 'apt-get'),
'package_manager' : base.packageManager,
'docker_base' : base.name().toLowerCase(),
'version' : VersionProperties.elasticsearch,
'major_minor_version': "${major}.${minor}",
@@ -182,21 +180,12 @@ ext.dockerBuildContext = { Architecture architecture, DockerBase base ->
from projectDir.resolve("src/docker/config")
}
}

from(projectDir.resolve("src/docker/Dockerfile")) {
expand(varExpansions)
filter SquashNewlinesFilter
}
}
}
//
//def createAndSetWritable(Object... locations) {
// locations.each { location ->
// File file = file(location)
// file.mkdirs()
// file.setWritable(true, false)
// }
//}

tasks.register("copyNodeKeyMaterial", Sync) {
def certsDir = file("build/certs")
66 changes: 46 additions & 20 deletions distribution/docker/src/docker/Dockerfile
@@ -43,29 +43,34 @@ RUN chmod 0555 /bin/tini
# Install required packages to extract the Elasticsearch distribution
<% if (docker_base == 'default' || docker_base == 'cloud') { %>
RUN <%= retry.loop(package_manager, "${package_manager} update && DEBIAN_FRONTEND=noninteractive ${package_manager} install -y curl ") %>
<% } else if (docker_base == "wolfi") { %>
RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %>
<% } else { %>
RUN <%= retry.loop(package_manager, "${package_manager} install -y findutils tar gzip") %>
<% } %>
# `tini` is a tiny but valid init for containers. This is used to cleanly
# control how ES and any child processes are shut down.
#
# The tini GitHub page gives instructions for verifying the binary using
# gpg, but the keyservers are slow to return the key and this can fail the
# build. Instead, we check the binary against the published checksum.
RUN set -eux ; \\
tini_bin="" ; \\
case "\$(arch)" in \\
aarch64) tini_bin='tini-arm64' ;; \\
x86_64) tini_bin='tini-amd64' ;; \\
*) echo >&2 ; echo >&2 "Unsupported architecture \$(arch)" ; echo >&2 ; exit 1 ;; \\
esac ; \\
curl --retry 10 -S -L -O https://github.com/krallin/tini/releases/download/v0.19.0/\${tini_bin} ; \\
curl --retry 10 -S -L -O https://github.com/krallin/tini/releases/download/v0.19.0/\${tini_bin}.sha256sum ; \\
sha256sum -c \${tini_bin}.sha256sum ; \\
rm \${tini_bin}.sha256sum ; \\
mv \${tini_bin} /bin/tini ; \\
chmod 0555 /bin/tini
<% if (docker_base != 'wolfi') { %>
# `tini` is a tiny but valid init for containers. This is used to cleanly
# control how ES and any child processes are shut down.
# For wolfi we pick it from the blessed wolfi package registry.
#
# The tini GitHub page gives instructions for verifying the binary using
# gpg, but the keyservers are slow to return the key and this can fail the
# build. Instead, we check the binary against the published checksum.
RUN set -eux ; \\
tini_bin="" ; \\
case "\$(arch)" in \\
aarch64) tini_bin='tini-arm64' ;; \\
x86_64) tini_bin='tini-amd64' ;; \\
*) echo >&2 ; echo >&2 "Unsupported architecture \$(arch)" ; echo >&2 ; exit 1 ;; \\
esac ; \\
curl --retry 10 -S -L -O https://github.com/krallin/tini/releases/download/v0.19.0/\${tini_bin} ; \\
curl --retry 10 -S -L -O https://github.com/krallin/tini/releases/download/v0.19.0/\${tini_bin}.sha256sum ; \\
sha256sum -c \${tini_bin}.sha256sum ; \\
rm \${tini_bin}.sha256sum ; \\
mv \${tini_bin} /bin/tini ; \\
chmod 0555 /bin/tini
<% } %>
<% } %>
@@ -152,6 +157,15 @@ RUN ${package_manager} update --setopt=tsflags=nodocs -y && \\
nc shadow-utils zip findutils unzip procps-ng && \\
${package_manager} clean all
<% } else if (docker_base == "wolfi") { %>
RUN <%= retry.loop(package_manager,
"export DEBIAN_FRONTEND=noninteractive && \n" +
" ${package_manager} update && \n" +
" ${package_manager} upgrade && \n" +
" ${package_manager} add --no-cache \n" +
" bash ca-certificates curl libsystemd netcat-openbsd p11-kit p11-kit-trust shadow tini unzip zip zstd && \n" +
" rm -rf /var/cache/apk/* "
) %>
<% } else if (docker_base == "default" || docker_base == "cloud") { %>
# Change default shell to bash, then install required packages with retries.
@@ -185,6 +199,11 @@ RUN groupadd -g 1000 elasticsearch && \\
adduser --uid 1000 --gid 1000 --home /usr/share/elasticsearch elasticsearch && \\
adduser elasticsearch root && \\
chown -R 0:0 /usr/share/elasticsearch
<% } else if (docker_base == "wolfi") { %>
RUN groupadd -g 1000 elasticsearch && \
adduser -G elasticsearch -u 1000 elasticsearch -D --home /usr/share/elasticsearch elasticsearch && \
adduser elasticsearch root && \
chown -R 0:0 /usr/share/elasticsearch
<% } else { %>
RUN groupadd -g 1000 elasticsearch && \\
adduser -u 1000 -g 1000 -G 0 -d /usr/share/elasticsearch elasticsearch && \\
@@ -196,7 +215,9 @@ ENV ELASTIC_CONTAINER true
WORKDIR /usr/share/elasticsearch
COPY --from=builder --chown=0:0 /usr/share/elasticsearch /usr/share/elasticsearch
<% if (docker_base != "wolfi") { %>
COPY --from=builder --chown=0:0 /bin/tini /bin/tini
<% } %>
<% if (docker_base == 'cloud') { %>
COPY --from=builder --chown=0:0 /opt /opt
@@ -280,7 +301,12 @@ CMD ["/app/elasticsearch.sh"]
RUN mkdir /app && \\
echo -e '#!/bin/bash\\nexec /usr/local/bin/docker-entrypoint.sh eswrapper' > /app/elasticsearch.sh && \\
chmod 0555 /app/elasticsearch.sh

<% } else if (docker_base == "wolfi") { %>
# Our actual entrypoint is `tini`, a minimal but functional init program. It
# calls the entrypoint we provide, while correctly forwarding signals.
ENTRYPOINT ["/sbin/tini", "--", "/usr/local/bin/docker-entrypoint.sh"]
# Dummy overridable parameter parsed by entrypoint
CMD ["eswrapper"]
<% } else { %>
# Our actual entrypoint is `tini`, a minimal but functional init program. It
# calls the entrypoint we provide, while correctly forwarding signals.
2 changes: 2 additions & 0 deletions distribution/docker/wolfi-docker-aarch64-export/build.gradle
@@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
2 changes: 2 additions & 0 deletions distribution/docker/wolfi-docker-export/build.gradle
@@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
6 changes: 6 additions & 0 deletions docs/changelog/112282.yaml
@@ -0,0 +1,6 @@
pr: 112282
summary: Adds example plugin for custom ingest processor
area: Ingest Node
type: enhancement
issues:
- 111539
5 changes: 5 additions & 0 deletions docs/changelog/112547.yaml
@@ -0,0 +1,5 @@
pr: 112547
summary: Remove reduce and `reduceContext` from `DelayedBucket`
area: Aggregations
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/112612.yaml
@@ -0,0 +1,5 @@
pr: 112612
summary: Set `replica_unassigned_buffer_time` in constructor
area: Health
type: bug
issues: []
5 changes: 3 additions & 2 deletions docs/plugins/development/creating-classic-plugins.asciidoc
@@ -32,12 +32,13 @@ for the plugin. If you need other resources, package them into a resources JAR.
The {es} repository contains {es-repo}tree/main/plugins/examples[examples of plugins]. Some of these include:

* a plugin with {es-repo}tree/main/plugins/examples/custom-settings[custom settings]
* a plugin with a {es-repo}tree/main/plugins/examples/custom-processor[custom ingest processor]
* adding {es-repo}tree/main/plugins/examples/rest-handler[custom rest endpoints]
* adding a {es-repo}tree/main/plugins/examples/rescore[custom rescorer]
* a script {es-repo}tree/main/plugins/examples/script-expert-scoring[implemented in Java]

These examples provide the bare bones needed to get started. For more
information about how to write a plugin, we recommend looking at the
information about how to write a plugin, we recommend looking at the
{es-repo}tree/main/plugins/[source code of existing plugins] for inspiration.

[discrete]
@@ -88,4 +89,4 @@ for more information.
[[plugin-descriptor-file-classic]]
==== The plugin descriptor file for classic plugins

include::plugin-descriptor-file.asciidoc[]
include::plugin-descriptor-file.asciidoc[]
2 changes: 2 additions & 0 deletions docs/reference/index-modules/similarity.asciidoc
@@ -5,6 +5,8 @@ A similarity (scoring / ranking model) defines how matching documents
are scored. Similarity is per field, meaning that via the mapping one
can define a different similarity per field.

Similarity is only applicable for text type and keyword type fields.

Configuring a custom similarity is considered an expert feature and the
builtin similarities are most likely sufficient as is described in
<<similarity>>.
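
For illustration only (not part of this commit's diff), a minimal sketch of what the added doc line describes: choosing a per-field similarity through the mapping. The index name `my-index` and the specific built-in similarities (`boolean`, `BM25`) are assumptions for the example.

[source,console]
----
PUT /my-index
{
  "mappings": {
    "properties": {
      "title": { "type": "text",    "similarity": "boolean" },
      "tags":  { "type": "keyword", "similarity": "BM25" }
    }
  }
}
----

Fields that omit `similarity` keep the index default (`BM25`), which is usually sufficient outside of expert use cases.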