Merge pull request #101 from oracle/release_2024-03-13
Releasing version 3.3.4.1.4.2
yanhaizhongyu authored Mar 13, 2024
2 parents db6e983 + a51d515 commit daf46c6
Showing 17 changed files with 213 additions and 192 deletions.
13 changes: 13 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,19 @@
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/).
## 3.3.4.1.4.2 - 2024-03-12
### Changed
- Updated OCI Java SDK version to `3.34.0`
- Updated com.fasterxml.jackson.core:jackson-databind to `2.16.0`
- Updated org.apache.avro:avro to `1.11.3`
- Use org.apache.commons:commons-text `2.16.0`
- Shaded io.netty packages

### Fixed
- Fixed external kinit OID issue and refactored SpnegoGenerator
- Fixed contract test failure AbstractContractSeekTest.testReadFullyZeroByteFile
- Fixed contract test failure AbstractContractSeekTest.testSeekReadClosedFile

## 3.3.4.1.4.1 - 2024-02-06
### Added
- Added custom authenticator which uses OKE Workload Identity authentication
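The two contract-test fixes recorded under 3.3.4.1.4.2 refer to Hadoop's standard FileSystem contract suite. As a rough illustration only (the class name and path below are hypothetical, the default contract options are assumed, and this is not code from this repository): testReadFullyZeroByteFile expects a zero-byte file to tolerate a zero-length readFully and to report end-of-stream from read(), and testSeekReadClosedFile expects a stream that has been closed to reject further reads rather than serve stale buffered data.

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ContractBehaviourSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Any Hadoop FileSystem; for this connector it would be an oci:// URI.
        FileSystem fs = FileSystem.get(conf);
        Path empty = new Path("/tmp/zero-byte-file");   // hypothetical path
        fs.create(empty).close();                       // create a zero-byte file

        // testReadFullyZeroByteFile: zero-length readFully succeeds, read() hits EOF.
        try (FSDataInputStream in = fs.open(empty)) {
            in.readFully(0, new byte[0]);               // must not throw
            System.out.println("read() on empty file: " + in.read());   // expect -1
        }

        // testSeekReadClosedFile: once closed, the stream must refuse to read.
        FSDataInputStream closed = fs.open(empty);
        closed.close();
        try {
            closed.read();
            System.out.println("unexpected: read after close succeeded");
        } catch (IOException expected) {
            System.out.println("read after close rejected: " + expected.getMessage());
        }
    }
}
```

The zero-length guard added to read(byte[], int, int) later in this diff, with its HDFS-10277 reference, looks like the corresponding implementation change.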
10 changes: 2 additions & 8 deletions README.md
@@ -45,12 +45,6 @@ Important: The Java SDK file version that you download from the [Oracle Releases

See [CHANGELOG](/CHANGELOG.md).

## Contributing

oci-hdfs-connector is an open source project. See [CONTRIBUTING](/CONTRIBUTING.md) for details.

Oracle gratefully acknowledges the contributions to oci-hdfs-connector that have been made by the community.

## Known Issues

You can find information on any known issues with the connector [here](https://docs.cloud.oracle.com/Content/knownissues.htm) and under the “Issues” tab of this GitHub repository.
@@ -66,15 +60,15 @@ You can find information on any known issues with the connector [here](https://d

## Contributing

This project welcomes contributions from the community. Before submitting a pull request, please [review our contribution guide](./CONTRIBUTING.md)
oci-hdfs-connector is an open source project. This project welcomes contributions from the community. Before submitting a pull request, please [review our contribution guide](./CONTRIBUTING.md)

## Security

Please consult the [security guide](./SECURITY.md) for our responsible security vulnerability disclosure process

## License

Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl
or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

38 changes: 0 additions & 38 deletions SECURITY.md

This file was deleted.

8 changes: 4 additions & 4 deletions hdfs-addons/hdfs-smartparquet/pom.xml
@@ -12,7 +12,7 @@
<parent>
<artifactId>oci-hdfs-addons</artifactId>
<groupId>com.oracle.oci.sdk</groupId>
<version>3.3.4.1.4.1</version>
<version>3.3.4.1.4.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>

@@ -149,7 +149,7 @@
<dependency>
<groupId>com.oracle.oci.sdk</groupId>
<artifactId>oci-hdfs-connector</artifactId>
<version>3.3.4.1.4.1</version>
<version>3.3.4.1.4.2</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@@ -251,7 +251,7 @@
<dependency>
<groupId>com.oracle.oci.sdk</groupId>
<artifactId>oci-hdfs-connector</artifactId>
<version>3.3.4.1.4.1</version>
<version>3.3.4.1.4.2</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
@@ -383,7 +383,7 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.13.4.2</version>
<version>2.16.0</version>
</dependency>
</dependencies>

BmcSmartParquetFSInputStream.java
@@ -43,7 +43,6 @@
*/
@Slf4j
public class BmcSmartParquetFSInputStream extends AbstractBmcCustomFSInputStream {
private long filePos = 0;
private long dataPos = -1;
private int dataMax = 0;
private int dataCurOffset = 0;
@@ -70,21 +69,17 @@ private Cache<String, ParquetFooterInfo> configureParquetCache(String spec) {
.build();
}

@Override
public long getPos() {
return filePos;
}

@Override
public int read() throws IOException {
LOG.debug("{}: Reading single byte at position {}", this, filePos);
this.checkNotClosed();
LOG.debug("{}: Reading single byte at position {}", this, this.currentPosition);
if (dataPos == -1) {
fillBuffer(1);
}
if (dataPos == -1) {
return -1;
}
filePos++;
this.currentPosition++;
dataCurOffset++;
this.statistics.incrementBytesRead(1L);
int ret = wrappedStream.read();
@@ -97,14 +92,20 @@ public int read() throws IOException {

@Override
public int read(long position, byte[] buffer, int offset, int length) throws IOException {
this.checkNotClosed();
LOG.debug("{}: Reading {} bytes at position {}", this, length, position);
seek(position);
return read(buffer, offset, length);
}

@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
LOG.debug("{}: Reading {} bytes at current position {}", this, length, filePos);
LOG.debug("{}: Reading {} bytes at current position {}", this, length, this.currentPosition);
this.checkNotClosed();
// see https://issues.apache.org/jira/browse/HDFS-10277
if (length == 0) {
return 0;
}
if (dataPos == -1) {
fillBuffer(length);
}
@@ -114,7 +115,7 @@ public int read(byte[] buffer, int offset, int length) throws IOException {
int len = Math.min(length, dataMax - dataCurOffset);
int n = wrappedStream.read(buffer, offset, len);
dataCurOffset += n;
filePos += n;
this.currentPosition += n;
this.statistics.incrementBytesRead(n);
if (dataCurOffset == dataMax) {
if (n != length) {
@@ -133,6 +134,7 @@ public void readFully(long position, byte[] buffer) throws IOException {

@Override
public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
this.checkNotClosed();
LOG.debug("{}: ReadFully {} bytes from {}", this, position, length);
seek(position);
int nBytes = Math.min((int) (status.getLen() - position), length);
@@ -149,26 +151,20 @@ public void readFully(long position, byte[] buffer, int offset, int length) thro

@Override
protected long doSeek(long pos) throws IOException {
throw new IOException("doSeek not implemented for read-ahead stream");
}

@Override
public void seek(long pos) throws IOException {
LOG.debug("{}: Seek to {}", this, pos);
if (dataPos == -1) {
filePos = pos;
return;
this.currentPosition = pos;
return pos;
}
if (pos < filePos) {
if (pos < this.currentPosition) {
dataPos = -1;
filePos = pos;
this.currentPosition = pos;
closeWrapped();
} else if (pos >= (dataPos + dataMax)) {
dataPos = -1;
filePos = pos;
this.currentPosition = pos;
closeWrapped();
} else {
int len = (int) (pos - filePos);
int len = (int) (pos - this.currentPosition);
byte[] b = new byte[len];
while (len > 0) {
int n = read(b, 0, len);
@@ -178,6 +174,7 @@ public void seek(long pos) throws IOException {
len -= n;
}
}
return pos;
}

@Override
@@ -221,10 +218,10 @@ public static class ParquetFooterInfo {
}

private void fillBuffer(long requested) throws IOException {
LOG.debug("{}: Filling buffer at {} length {}", this, filePos, status.getLen());
long start = filePos;
LOG.debug("{}: Filling buffer at {} length {}", this, this.currentPosition, status.getLen());
long start = this.currentPosition;
// Read at least blocksize
long end = filePos + Math.max(requested, ociReadAheadBlockSize);
long end = this.currentPosition + Math.max(requested, ociReadAheadBlockSize);
// But don't read past the end
end = Math.min(end, status.getLen());
if (end == start) {
@@ -340,14 +337,14 @@ private void fillBuffer(long requested) throws IOException {
status.getLen());
GetObjectResponse response = objectStorage.getObject(modRequest);
wrappedStream = response.getInputStream();
dataPos = filePos;
dataPos = this.currentPosition;
dataMax = len;
dataCurOffset = 0;
LOG.debug(
"{}: After filling, dataPos {}, filePos {}, dataMax {}",
"{}: After filling, dataPos {}, this.currentPosition {}, dataMax {}",
this,
dataPos,
filePos,
this.currentPosition,
dataMax);
}

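The stream changes above depend on state and guards that live in the connector's base input-stream classes, which this diff does not show: the removed filePos field and getPos() override give way to an inherited currentPosition, new checkNotClosed() calls protect every read path, and doSeek(long) now carries the buffered-seek logic that the old seek(long) override implemented directly. The sketch below is only a guess at the shape of such a base class, included for orientation; it is not the connector's actual AbstractBmcCustomFSInputStream.

```java
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

// Hypothetical stand-in for the base stream class assumed by the diff above.
// It tracks the logical file position, guards against use after close, and
// routes seek() through a doSeek() hook that subclasses override.
abstract class PositionedStreamSketch extends InputStream {
    protected long currentPosition = 0;      // what getPos() reports
    private volatile boolean closed = false;

    public long getPos() {
        return currentPosition;
    }

    /** Throws if the stream was closed, so reads never touch stale buffers. */
    protected final void checkNotClosed() throws IOException {
        if (closed) {
            throw new IOException("Stream is closed: " + this);
        }
    }

    /** Subclasses move the underlying source and return the new position. */
    protected abstract long doSeek(long pos) throws IOException;

    public void seek(long pos) throws IOException {
        checkNotClosed();
        if (pos < 0) {
            throw new EOFException("Cannot seek to negative position " + pos);
        }
        currentPosition = doSeek(pos);
    }

    @Override
    public void close() throws IOException {
        closed = true;
    }
}
```

Under that assumption, a subclass only overrides doSeek() and the low-level read methods, while position tracking and close handling stay in one place.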
2 changes: 1 addition & 1 deletion hdfs-addons/pom.xml
@@ -12,7 +12,7 @@
<parent>
<artifactId>oci-hdfs</artifactId>
<groupId>com.oracle.oci.sdk</groupId>
<version>3.3.4.1.4.1</version>
<version>3.3.4.1.4.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>

8 changes: 4 additions & 4 deletions hdfs-connector/pom.xml
@@ -12,7 +12,7 @@
<parent>
<groupId>com.oracle.oci.sdk</groupId>
<artifactId>oci-hdfs</artifactId>
<version>3.3.4.1.4.1</version>
<version>3.3.4.1.4.2</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -539,17 +539,17 @@
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>4.1.77.Final</version>
<version>${netty.version}</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec-http</artifactId>
<version>4.1.86.Final</version>
<version>${netty.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.13.4.2</version>
<version>2.16.0</version>
</dependency>
<!-- Specifically pick the version used by the SDK so contract tests work -->
<dependency>
(The diffs for the remaining changed files are not shown.)
