DRILL-8452: Library upgrades (apache#2823)
jnturton authored and cgivre committed Nov 2, 2023
1 parent 5d4b9cd commit 0ff99dd
Showing 33 changed files with 362 additions and 335 deletions.
2 changes: 1 addition & 1 deletion contrib/storage-elasticsearch/pom.xml
@@ -68,7 +68,7 @@
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-client</artifactId>
-<version>8.6.0</version>
+<version>8.9.1</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
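Note: the upgraded elasticsearch-rest-client is the low-level client that the test suite below authenticates against with the "elastic"/"s3cret" credentials. A minimal, illustrative sketch of wiring basic auth into that client (host, port and scheme are placeholders, not values taken from this commit):

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;

public class RestClientSketch {
  public static RestClient build() {
    // Basic-auth credentials matching the test container's "elastic" user.
    BasicCredentialsProvider credentials = new BasicCredentialsProvider();
    credentials.setCredentials(AuthScope.ANY,
        new UsernamePasswordCredentials("elastic", "s3cret"));

    // Low-level REST client pointed at a local node (placeholder endpoint).
    return RestClient.builder(new HttpHost("localhost", 9200, "https"))
        .setHttpClientConfigCallback(
            builder -> builder.setDefaultCredentialsProvider(credentials))
        .build();
  }
}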
@@ -68,7 +68,7 @@ public class TestElasticsearchSuite extends BaseTest {
protected static ElasticsearchContainer elasticsearch;
public static final String ELASTICSEARCH_USERNAME = "elastic";
public static final String ELASTICSEARCH_PASSWORD = "s3cret";
-private static final String IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch:8.6.0";
+private static final String IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch:8.9.1";

private static final AtomicInteger initCount = new AtomicInteger(0);

@@ -164,7 +164,8 @@ private static void startElasticsearch() throws GeneralSecurityException {
.withPassword(ELASTICSEARCH_PASSWORD)
.withEnv("xpack.security.enabled", "true")
.withEnv("xpack.security.transport.ssl.enabled", "false")
-.withEnv("ES_JAVA_OPTS", "-Xmx1g"); // ES gobbles up lots of RAM under defaults.
+.withEnv("discovery.type", "single-node")
+.withEnv("ES_JAVA_OPTS", "-Xmx2g"); // ES gobbles up lots of RAM under defaults.

HttpsURLConnection.setDefaultSSLSocketFactory(SslUtils.trustAllSSLContext().getSocketFactory());
elasticsearch.start();
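Pieced together from the fragments above, the Testcontainers bootstrap after this change reads roughly as follows. The image name, password and env settings are the ones visible in the diff; the surrounding class and method are illustrative scaffolding, not Drill's actual test code:

import org.testcontainers.elasticsearch.ElasticsearchContainer;

public class ElasticsearchContainerSketch {
  public static ElasticsearchContainer start() {
    ElasticsearchContainer elasticsearch =
        new ElasticsearchContainer("docker.elastic.co/elasticsearch/elasticsearch:8.9.1")
            .withPassword("s3cret")                              // ELASTIC_PASSWORD for the "elastic" user
            .withEnv("xpack.security.enabled", "true")
            .withEnv("xpack.security.transport.ssl.enabled", "false")
            .withEnv("discovery.type", "single-node")            // skip multi-node bootstrap checks
            .withEnv("ES_JAVA_OPTS", "-Xmx2g");                  // cap the heap; ES defaults are greedy
    elasticsearch.start();
    return elasticsearch;
  }
}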
4 changes: 0 additions & 4 deletions contrib/storage-hive/core/pom.xml
@@ -257,10 +257,6 @@
<artifactId>reload4j</artifactId>
<groupId>ch.qos.reload4j</groupId>
</exclusion>
-<exclusion>
-<groupId>org.mortbay.jetty</groupId>
-<artifactId>servlet-api-2.5</artifactId>
-</exclusion>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-compiler</artifactId>
2 changes: 1 addition & 1 deletion contrib/storage-kafka/pom.xml
@@ -32,7 +32,7 @@

<properties>
<kafka.version>2.8.2</kafka.version>
-<kafka.TestSuite>**/TestKafkaSuit.class</kafka.TestSuite>
+<kafka.TestSuite>**/TestKafkaSuite.class</kafka.TestSuite>
</properties>

<repositories>
@@ -24,8 +24,8 @@
import org.junit.Test;
import org.junit.experimental.categories.Category;

-import static org.apache.drill.exec.store.kafka.TestKafkaSuit.NUM_JSON_MSG;
-import static org.apache.drill.exec.store.kafka.TestKafkaSuit.embeddedKafkaCluster;
+import static org.apache.drill.exec.store.kafka.TestKafkaSuite.NUM_JSON_MSG;
+import static org.apache.drill.exec.store.kafka.TestKafkaSuite.embeddedKafkaCluster;
import static org.junit.Assert.assertEquals;

@Category({KafkaStorageTest.class, SlowTest.class})
@@ -36,7 +36,7 @@ public class KafkaFilterPushdownTest extends KafkaTestBase {

@BeforeClass
public static void setup() throws Exception {
-TestKafkaSuit.createTopicHelper(TestQueryConstants.JSON_PUSHDOWN_TOPIC, NUM_PARTITIONS);
+TestKafkaSuite.createTopicHelper(TestQueryConstants.JSON_PUSHDOWN_TOPIC, NUM_PARTITIONS);
KafkaMessageGenerator generator = new KafkaMessageGenerator(embeddedKafkaCluster.getKafkaBrokerList(),
StringSerializer.class);
generator.populateJsonMsgWithTimestamps(TestQueryConstants.JSON_PUSHDOWN_TOPIC, NUM_JSON_MSG);
@@ -87,7 +87,7 @@ public void populateAvroMsgIntoKafka(String topic, int numMsg) {
String key1Schema = "{\"type\":\"record\"," +
"\"name\":\"key1record\"," +
"\"fields\":[" +
-"{\"name\":\"key1\",\"type\":\"string\"}]}\"";
+"{\"name\":\"key1\",\"type\":\"string\"}]}";
Schema keySchema = parser.parse(key1Schema);
GenericRecordBuilder keyBuilder = new GenericRecordBuilder(keySchema);

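The change above removes a stray trailing \" that made the hand-written key schema invalid JSON. As an alternative (not what this test does), Avro's SchemaBuilder can produce the same schema without escaped string literals:

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericRecordBuilder;

public class KeySchemaSketch {
  public static Schema keySchema() {
    // Equivalent to {"type":"record","name":"key1record","fields":[{"name":"key1","type":"string"}]}
    return SchemaBuilder.record("key1record")
        .fields()
        .requiredString("key1")
        .endRecord();
  }

  public static void main(String[] args) {
    GenericRecordBuilder keyBuilder = new GenericRecordBuilder(keySchema());
    System.out.println(keyBuilder.set("key1", "k0").build());
  }
}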
@@ -41,7 +41,7 @@
import java.util.Map;
import java.util.Set;

-import static org.apache.drill.exec.store.kafka.TestKafkaSuit.embeddedKafkaCluster;
+import static org.apache.drill.exec.store.kafka.TestKafkaSuite.embeddedKafkaCluster;
import static org.junit.Assert.fail;

@FixMethodOrder(MethodSorters.JVM)
@@ -76,7 +76,7 @@ public void testResultLimit() throws Exception {
@Test
public void testResultCount() {
String queryString = String.format(TestQueryConstants.MSG_SELECT_QUERY, TestQueryConstants.JSON_TOPIC);
-runKafkaSQLVerifyCount(queryString, TestKafkaSuit.NUM_JSON_MSG);
+runKafkaSQLVerifyCount(queryString, TestKafkaSuite.NUM_JSON_MSG);
}

@Test
@@ -91,7 +91,7 @@ public void testAvroResultCount() {
KafkaAvroDeserializer.class.getName());

String queryString = String.format(TestQueryConstants.MSG_SELECT_QUERY, TestQueryConstants.AVRO_TOPIC);
-runKafkaSQLVerifyCount(queryString, TestKafkaSuit.NUM_JSON_MSG);
+runKafkaSQLVerifyCount(queryString, TestKafkaSuite.NUM_JSON_MSG);
} finally {
client.resetSession(ExecConstants.KAFKA_RECORD_READER);
}
@@ -209,7 +209,7 @@ public void testPhysicalPlanSubmissionAvro() throws Exception {
@Test
public void testOneMessageTopic() throws Exception {
String topicName = "topicWithOneMessage";
-TestKafkaSuit.createTopicHelper(topicName, 1);
+TestKafkaSuite.createTopicHelper(topicName, 1);
KafkaMessageGenerator generator = new KafkaMessageGenerator(embeddedKafkaCluster.getKafkaBrokerList(), StringSerializer.class);
generator.populateMessages(topicName, "{\"index\": 1}");

@@ -224,7 +224,7 @@ public void testOneMessageTopic() throws Exception {
@Test
public void testMalformedRecords() throws Exception {
String topicName = "topicWithMalFormedMessages";
-TestKafkaSuit.createTopicHelper(topicName, 1);
+TestKafkaSuite.createTopicHelper(topicName, 1);
try {
KafkaMessageGenerator generator = new KafkaMessageGenerator(embeddedKafkaCluster.getKafkaBrokerList(), StringSerializer.class);
generator.populateMessages(topicName, "Test");
@@ -259,7 +259,7 @@ public void testMalformedRecords() throws Exception {
@Test
public void testNanInf() throws Exception {
String topicName = "topicWithNanInf";
-TestKafkaSuit.createTopicHelper(topicName, 1);
+TestKafkaSuite.createTopicHelper(topicName, 1);
try {
KafkaMessageGenerator generator = new KafkaMessageGenerator(embeddedKafkaCluster.getKafkaBrokerList(), StringSerializer.class);
generator.populateMessages(topicName, "{\"nan_col\":NaN, \"inf_col\":Infinity}");
@@ -287,7 +287,7 @@ public void testEscapeAnyChar() throws Exception {
@Test
public void testEscapeAnyChar() throws Exception {
String topicName = "topicWithEscapeAnyChar";
-TestKafkaSuit.createTopicHelper(topicName, 1);
+TestKafkaSuite.createTopicHelper(topicName, 1);
try {
KafkaMessageGenerator generator = new KafkaMessageGenerator(embeddedKafkaCluster.getKafkaBrokerList(), StringSerializer.class);
generator.populateMessages(topicName, "{\"name\": \"AB\\\"\\C\"}");
@@ -43,11 +43,11 @@ public class KafkaTestBase extends ClusterTest {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Make sure this test is only running as part of the suite
-Assume.assumeTrue(TestKafkaSuit.isRunningSuite());
+Assume.assumeTrue(TestKafkaSuite.isRunningSuite());
ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
startCluster(builder);
-TestKafkaSuit.initKafka();
-initKafkaStoragePlugin(TestKafkaSuit.embeddedKafkaCluster);
+TestKafkaSuite.initKafka();
+initKafkaStoragePlugin(TestKafkaSuite.embeddedKafkaCluster);
}

public static void initKafkaStoragePlugin(EmbeddedKafkaCluster embeddedKafkaCluster) throws Exception {
@@ -76,8 +76,8 @@ public static long testSql(String sql) {

@AfterClass
public static void tearDownKafkaTestBase() {
-if (TestKafkaSuit.isRunningSuite()) {
-TestKafkaSuit.tearDownCluster();
+if (TestKafkaSuite.isRunningSuite()) {
+TestKafkaSuite.tearDownCluster();
}
}
}
@@ -48,7 +48,7 @@ public void setUp() {
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
consumerProps.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "4");
kafkaConsumer = new KafkaConsumer<>(consumerProps);
-subScanSpec = new KafkaPartitionScanSpec(TestQueryConstants.JSON_TOPIC, 0, 0, TestKafkaSuit.NUM_JSON_MSG);
+subScanSpec = new KafkaPartitionScanSpec(TestQueryConstants.JSON_TOPIC, 0, 0, TestKafkaSuite.NUM_JSON_MSG);
}

@After
@@ -105,6 +105,6 @@ public void testShouldReturnMessage2() {
Assert.assertNotNull(consumerRecord);
++messageCount;
}
-Assert.assertEquals(TestKafkaSuit.NUM_JSON_MSG, messageCount);
+Assert.assertEquals(TestKafkaSuite.NUM_JSON_MSG, messageCount);
}
}
@@ -54,11 +54,11 @@
@Category({KafkaStorageTest.class, SlowTest.class})
@RunWith(Suite.class)
@SuiteClasses({KafkaQueriesTest.class, MessageIteratorTest.class, MessageReaderFactoryTest.class, KafkaFilterPushdownTest.class})
-public class TestKafkaSuit extends BaseTest {
+public class TestKafkaSuite extends BaseTest {

-private static final Logger logger = LoggerFactory.getLogger(TestKafkaSuit.class);
+private static final Logger logger = LoggerFactory.getLogger(TestKafkaSuite.class);

-private static final String LOGIN_CONF_RESOURCE_PATHNAME = "login.conf";
+private static final String LOGIN_CONF_RESOURCE_PATHNAME = "login.jaasconf";

public static EmbeddedKafkaCluster embeddedKafkaCluster;

@@ -76,7 +76,7 @@ public class TestKafkaSuit extends BaseTest {

@BeforeClass
public static void initKafka() throws Exception {
-synchronized (TestKafkaSuit.class) {
+synchronized (TestKafkaSuite.class) {
if (initCount.get() == 0) {
ZookeeperTestUtil.setZookeeperSaslTestConfigProps();
System.setProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM, ClassLoader.getSystemResource(LOGIN_CONF_RESOURCE_PATHNAME).getFile());
@@ -104,7 +104,7 @@ public static boolean isRunningSuite() {

@AfterClass
public static void tearDownCluster() {
-synchronized (TestKafkaSuit.class) {
+synchronized (TestKafkaSuite.class) {
if (initCount.decrementAndGet() == 0) {
if (zkClient != null) {
zkClient.close();
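The suite above guards its embedded Kafka cluster with a reference count so that individual tests can run either standalone or under the suite. Stripped of Kafka specifics, the pattern is roughly the following sketch; the class and field names here are illustrative, not Drill's:

import java.util.concurrent.atomic.AtomicInteger;

public final class SharedClusterLifecycle {
  private static final AtomicInteger initCount = new AtomicInteger(0);
  private static Object cluster;            // stands in for EmbeddedKafkaCluster
  private static volatile boolean runningSuite;

  public static void init() {
    synchronized (SharedClusterLifecycle.class) {
      if (initCount.get() == 0) {
        cluster = new Object();             // start the embedded cluster here
        runningSuite = true;
      }
      initCount.incrementAndGet();          // every caller registers itself
    }
  }

  public static void tearDown() {
    synchronized (SharedClusterLifecycle.class) {
      if (initCount.decrementAndGet() == 0) {
        cluster = null;                     // last caller out stops the cluster
        runningSuite = false;
      }
    }
  }

  public static boolean isRunningSuite() {
    return runningSuite;
  }
}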
@@ -22,4 +22,4 @@
hadoop_simple {
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required;
-};
+};
File renamed without changes.
File renamed without changes.
File renamed without changes.
4 changes: 2 additions & 2 deletions distribution/src/assemble/component.xml
@@ -114,7 +114,7 @@
<include>org.glassfish.jersey.core</include>
<include>org.reflections</include>
<include>org.glassfish.hk2.external</include>
-<include>org.mortbay.jetty</include>
+<include>org.eclipse.jetty</include>
<include>javax.activation</include>
<include>javax.annotation</include>
<include>org.glassfish.jersey.containers</include>
@@ -155,7 +155,7 @@
<exclude>org.glassfish.jersey.core</exclude>
<exclude>org.reflections</exclude>
<exclude>org.glassfish.hk2.external</exclude>
-<exclude>org.mortbay.jetty</exclude>
+<exclude>org.eclipse.jetty</exclude>
<exclude>javax.activation</exclude>
<exclude>javax.annotation</exclude>
<exclude>org.glassfish.jersey.containers</exclude>
@@ -22,13 +22,17 @@
import java.util.Map;

import org.apache.drill.yarn.appMaster.TaskSpec;
-import org.mortbay.log.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

import com.typesafe.config.Config;
import com.typesafe.config.ConfigList;
import com.typesafe.config.ConfigValue;

public class ClusterDef {

+private static Logger logger = LoggerFactory.getLogger(ClusterDef.class);

// The following keys are relative to the cluster group definition

public static final String GROUP_NAME = "name";
@@ -139,7 +143,7 @@ public LabeledGroup(Map<String, Object> pool, int index) {
super(pool, index, GroupType.LABELED);
drillbitLabelExpr = (String) pool.get(DRILLBIT_LABEL);
if (drillbitLabelExpr == null) {
-Log.warn("Labeled pool is missing the drillbit label expression ("
+logger.warn("Labeled pool is missing the drillbit label expression ("
+ DRILLBIT_LABEL + "), will treat pool as basic.");
}
}
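The change above replaces Jetty 6's org.mortbay.log.Log facade with SLF4J. The general migration pattern, including parameterized messages rather than string concatenation, looks roughly like this sketch (not the exact Drill code):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingMigrationSketch {
  private static final Logger logger = LoggerFactory.getLogger(LoggingMigrationSketch.class);

  void warnMissingLabel(String drillbitLabelKey) {
    // Before: org.mortbay.log.Log.warn("... (" + drillbitLabelKey + "), ...");
    // After: SLF4J with a {} placeholder, so the message is only formatted when WARN is enabled.
    logger.warn("Labeled pool is missing the drillbit label expression ({}), will treat pool as basic.",
        drillbitLabelKey);
  }
}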
9 changes: 2 additions & 7 deletions exec/java-exec/pom.xml
@@ -359,9 +359,8 @@
<artifactId>protobuf-java</artifactId>
</dependency>
<dependency>
-<groupId>org.mortbay.jetty</groupId>
+<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
-<version>6.1.26</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
@@ -442,10 +441,6 @@
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
-<exclusion>
-<groupId>org.eclipse.jetty</groupId>
-<artifactId>jetty-servlets</artifactId>
-</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
@@ -845,9 +840,9 @@
<goal>unpack</goal>
</goals>
<configuration>
-<overWrite>false</overWrite>
<artifactItems>
<artifactItem>
+<overWrite>false</overWrite>
<groupId>${calcite.groupId}</groupId>
<artifactId>calcite-core</artifactId>
<type>jar</type>
@@ -30,7 +30,7 @@
import org.codehaus.janino.Java.CompilationUnit;
import org.codehaus.janino.Parser;
import org.codehaus.janino.Scanner;
-import org.mortbay.util.IO;
+import org.eclipse.jetty.util.IO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -28,7 +28,6 @@
import org.eclipse.jetty.security.DefaultIdentityService;
import org.eclipse.jetty.security.SpnegoLoginService;
import org.eclipse.jetty.server.UserIdentity;
-import org.eclipse.jetty.util.B64Code;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
@@ -42,6 +41,7 @@
import java.lang.reflect.Field;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
+import java.util.Base64;

/**
* Custom implementation of DrillSpnegoLoginService to avoid the need of passing targetName in a config file,
@@ -94,7 +94,7 @@ public UserIdentity login(final String username, final Object credentials, Servl
private UserIdentity spnegoLogin(Object credentials, ServletRequest request) {

String encodedAuthToken = (String) credentials;
-byte[] authToken = B64Code.decode(encodedAuthToken);
+byte[] authToken = Base64.getDecoder().decode(encodedAuthToken);
GSSManager manager = GSSManager.getInstance();

try {
@@ -149,4 +149,3 @@ private UserIdentity spnegoLogin(Object credentials, ServletRequest request) {
return null;
}
}
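The switch from Jetty's removed B64Code helper to java.util.Base64 in this file relies only on the JDK. A minimal sketch of both directions of that API, using a placeholder token rather than a real GSS-API token:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64Sketch {
  public static void main(String[] args) {
    byte[] token = "spnego-token".getBytes(StandardCharsets.UTF_8);

    // Encoding side (what a client would do with its negotiation token).
    String encoded = Base64.getEncoder().encodeToString(token);

    // Decoding side, as in DrillSpnegoLoginService.spnegoLogin above.
    byte[] decoded = Base64.getDecoder().decode(encoded);
    System.out.println(new String(decoded, StandardCharsets.UTF_8));
  }
}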

@@ -66,14 +66,14 @@ public SslContextFactoryConfigurator(DrillConfig config, String drillbitEndpoint
* @return new configured sslContextFactory
* @throws Exception when generation of self-signed certificate failed
*/
-public SslContextFactory configureNewSslContextFactory() throws Exception {
+public SslContextFactory.Server configureNewSslContextFactory() throws Exception {
SSLConfig sslConf = new SSLConfigBuilder()
.config(config)
.mode(SSLConfig.Mode.SERVER)
.initializeSSLContext(false)
.validateKeyStore(true)
.build();
-final SslContextFactory sslContextFactory = new SslContextFactory();
+SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
if (sslConf.isSslValid()) {
useOptionsConfiguredByUser(sslContextFactory, sslConf);
} else {
@@ -82,7 +82,7 @@ public SslContextFactory configureNewSslContextFactory() throws Exception {
return sslContextFactory;
}

-private void useOptionsConfiguredByUser(SslContextFactory sslFactory, SSLConfig sslConf) {
+private void useOptionsConfiguredByUser(SslContextFactory.Server sslFactory, SSLConfig sslConf) {
logger.info("Using configured SSL settings for web server");
sslFactory.setKeyStorePath(sslConf.getKeyStorePath());
sslFactory.setKeyStorePassword(sslConf.getKeyStorePassword());
@@ -159,7 +159,7 @@ private <T> void setIfPresent(String optKey, Function<String, T> optGet, Consume
}


-private void useAutoGeneratedSelfSignedCertificate(SslContextFactory sslContextFactory) throws Exception {
+private void useAutoGeneratedSelfSignedCertificate(SslContextFactory.Server sslContextFactory) throws Exception {
logger.info("Using generated self-signed SSL settings for web server");
final SecureRandom random = new SecureRandom();

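Jetty 9.4 split SslContextFactory into Client and Server variants, which is why the signatures above change to SslContextFactory.Server. Wiring the server-side factory into an HTTPS connector looks roughly like the sketch below; the keystore values and port are placeholders, not Drill's actual web server setup:

import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.util.ssl.SslContextFactory;

public class HttpsConnectorSketch {
  public static ServerConnector httpsConnector(Server server) {
    SslContextFactory.Server ssl = new SslContextFactory.Server();
    ssl.setKeyStorePath("/path/to/keystore.jks");   // placeholder keystore
    ssl.setKeyStorePassword("changeit");            // placeholder password

    HttpConfiguration httpsConfig = new HttpConfiguration();
    httpsConfig.addCustomizer(new SecureRequestCustomizer());

    ServerConnector connector = new ServerConnector(server,
        new SslConnectionFactory(ssl, "http/1.1"),
        new HttpConnectionFactory(httpsConfig));
    connector.setPort(8443);
    return connector;
  }
}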
@@ -25,7 +25,7 @@

public class ZookeeperTestUtil {

-private static final String LOGIN_CONF_RESOURCE_PATHNAME = "login.conf";
+private static final String LOGIN_CONF_RESOURCE_PATHNAME = "login.jaasconf";

/**
* Sets zookeeper server and client SASL test config properties.
