Skip to content

Commit

Permalink
Merge pull request #60 from scalyr/log4JUpdate
Browse files Browse the repository at this point in the history
Address Log4J Arbitrary Code Execution Exploit
  • Loading branch information
Kami authored Dec 13, 2021
2 parents 048a7c2 + eb56a62 commit 0e34d82
Show file tree
Hide file tree
Showing 7 changed files with 54 additions and 52 deletions.
20 changes: 6 additions & 14 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@ version: 2.1
parameters:
scalyr_sink_version: # scalyr_sink_version needs to be updated on every new version release.
type: string
default: "1.3"
default: "1.4"

jobs: # a collection of steps
build: # runs not using Workflows must have a `build` job as entry point

working_directory: ~/circleci-kafka-connect-scalyr # directory where steps will run

docker: # run the steps with Docker
- image: circleci/openjdk:8-jdk-stretch # ...with this image as the primary container; this is where all `steps` will run
- image: cimg/openjdk:11.0 # ...with this image as the primary container; this is where all `steps` will run

steps: # a collection of executable commands

Expand All @@ -39,14 +39,6 @@ jobs: # a collection of steps

- setup_remote_docker

- run: # Python dependencies for System Test
name: Python Dependencies
command: |
set -x
sudo apt update
sudo apt install python-pip
pip install requests
- run:
name: System Test
command: |
Expand All @@ -61,10 +53,10 @@ jobs: # a collection of steps
.circleci/configure_scalyr_connector.sh
# Verify logs are in Scalyr
python .circleci/verify_scalyr_events.py dataset=\'accesslog\'
python .circleci/verify_scalyr_events.py app=\'customApp\'
python .circleci/verify_scalyr_events.py tag=\'fluentd-apache\'
python .circleci/verify_scalyr_events.py tag=\'fluentbit-cpu\' 50
python3 .circleci/verify_scalyr_events.py dataset=\'accesslog\'
python3 .circleci/verify_scalyr_events.py app=\'customApp\'
python3 .circleci/verify_scalyr_events.py tag=\'fluentd-apache\'
python3 .circleci/verify_scalyr_events.py tag=\'fluentbit-cpu\' 50
- store_test_results: # uploads the test metadata from the `target/surefire-reports` directory so that it can show up in the CircleCI dashboard.
# Upload test results for display in Test Summary: https://circleci.com/docs/2.0/collect-test-data/
Expand Down
7 changes: 4 additions & 3 deletions .circleci/verify_scalyr_events.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Copyright 2014-2020 Scalyr Inc.
#!/usr/bin/env python3
# Copyright 2014-2021 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -106,10 +107,10 @@ def check_scalyr_events(additional_filter, expected_num_events):

print("Query returned {0} Scalyr events".format(matches))
if not has_expected_session_attrs:
print "Session attributes incorrect!"
print("Session attributes incorrect!")

if not has_expected_event_attrs:
print "Event attributes incorrect!"
print("Event attributes incorrect!")

return matches == expected_num_events and has_expected_session_attrs and has_expected_event_attrs

Expand Down
32 changes: 20 additions & 12 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,22 +1,30 @@
# Kafka Connect Scalyr Sink Connector Changes by Release

## 1.4

* Address the Log4J arbitrary code execution exploit by upgrading to log4j v2.15.0. For more information on the vulnerability,
  see the upstream Log4j CVE: https://nvd.nist.gov/vuln/detail/CVE-2021-44228.

## 1.3

Features
* Regular expression support for custom application event mapping `matcher.value`.
* `matchAll` support for custom application event mapping matcher.

## 1.2

* Allow not specifying application attribute fields in custom application event mappings when `send_entire_record` is `true`.
* Change default `batch_send_size_bytes` to 5 MB.

## 1.1
Performance improvements for JSON serialization.

## 1.0

Initial Release

Features:
* Supports Elastic Filebeat log messages with automatic conversion of Filebeat log messages to Scalyr log events.
* Supports custom application log messages using user defined conversion of message fields to Scalyr log event attributes.
* Supports Fluentd and Fluent Bit using custom application event mappings.
* Exactly once delivery using the topic, partition, and offset to uniquely identify events and prevent duplicate delivery.

## 1.1
Performance improvements for JSON serialization.

## 1.2
* Allow not specifying application attribute fields in custom application event mappings when `send_entire_record` is `true`.
* Change default `batch_send_size_bytes` to 5 MB.

## 1.3
Features
* Regular expression support for custom application event mapping `matcher.value`.
* `matchAll` support for custom application event mapping matcher.
12 changes: 6 additions & 6 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,14 @@
version: '3'
services:
zookeeper:
image: confluentinc/cp-zookeeper:5.4.3
image: confluentinc/cp-zookeeper:6.1.0
container_name: zookeeper
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000

kafka:
image: confluentinc/cp-kafka:5.4.3
image: confluentinc/cp-kafka:6.1.0
container_name: kafka
depends_on:
- zookeeper
Expand All @@ -30,14 +30,14 @@ services:
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1

kafka-setup:
image: confluentinc/cp-kafka:5.4.3
image: confluentinc/cp-kafka:6.1.0
depends_on:
- kafka
command: "bash -c 'echo Waiting for Kafka to be ready... && \
cub kafka-ready -z zookeeper:2181 1 30 && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic connect-config-storage && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic connect-offset-storage && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic connect-status-storage && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --config cleanup.policy=compact --topic connect-config-storage && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --config cleanup.policy=compact --topic connect-offset-storage && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --config cleanup.policy=compact --topic connect-status-storage && \
kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic logs'"
environment:
KAFKA_BROKER_ID: 1
Expand Down
25 changes: 13 additions & 12 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@

<groupId>com.scalyr.integrations.kafka</groupId>
<artifactId>kafka-connect-scalyr-sink</artifactId>
<version>1.3</version>
<version>1.4</version>
<packaging>jar</packaging>

<name>kafka-connect-scalyr-sink</name>

<properties>
<kafka.version>2.4.1</kafka.version>
<junit.version>4.13.1</junit.version>
<junit.version>4.13.2</junit.version>
<sonar.projectKey>scalyr_kafka-connect-scalyr</sonar.projectKey>
<sonar.organization>scalyr</sonar.organization>
<sonar.host.url>https://sonarcloud.io</sonar.host.url>
Expand Down Expand Up @@ -44,7 +44,7 @@
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>29.0-jre</version>
<version>31.0.1-jre</version>
</dependency>
<dependency>
<groupId>com.github.luben</groupId>
Expand Down Expand Up @@ -76,21 +76,22 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.32</version>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.15.0</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.32</version>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.15.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.2.0</version>
<configuration>
<archive>
<manifest>
Expand All @@ -106,13 +107,13 @@
<version>2.5.1</version>
<inherited>true</inherited>
<configuration>
<source>8</source>
<target>8</target>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.5.3</version>
<version>3.3.0</version>
<configuration>
<descriptors>
<descriptor>src/main/assembly/package.xml</descriptor>
Expand Down
3 changes: 1 addition & 2 deletions src/test/SystemTest/docker/kafka-connect/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Create Kafka Connect image with Scalyr Sink Connector
#FROM confluentinc/cp-kafka-connect-base:latest # latest build doesn't even start correctly without our connector
FROM confluentinc/cp-kafka-connect:5.5.1-1-ubi8
FROM confluentinc/cp-kafka-connect:6.1.0-1-ubi8
RUN mkdir -p /etc/kafka-connect/jars/kafka-connect-scalyr-sink
COPY target/kafka-connect-scalyr-sink-latest-package/share/java/kafka-connect-scalyr-sink /etc/kafka-connect/jars/kafka-connect-scalyr-sink
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;
import org.asynchttpclient.AsyncHttpClient;
import org.junit.After;
import org.junit.Before;
Expand All @@ -47,6 +47,7 @@
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
Expand Down Expand Up @@ -103,7 +104,7 @@ public void setup() {
this.deflateCompressor = CompressorFactory.getCompressor(CompressorFactory.DEFLATE, 3);

// We disable payload logging so we don't get very large raw payload messages in the log output
LogManager.getLogger("com.scalyr.integrations.kafka.eventpayload").setLevel(Level.OFF);
Configurator.setLevel("com.scalyr.integrations.kafka.eventpayload", Level.OFF);
}

@After
Expand Down

0 comments on commit 0e34d82

Please sign in to comment.