Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: java FFI things #2644

Merged
merged 4 commits into from
Mar 11, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions java/vortex-jni/src/main/java/dev/vortex/api/Array.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ public interface Array extends AutoCloseable {

boolean getNull(int index);

int getNullCount();

byte getByte(int index);

short getShort(int index);
Expand All @@ -43,4 +45,7 @@ public interface Array extends AutoCloseable {
String getUTF8(int index);

byte[] getBinary(int index);

@Override
void close();
}
3 changes: 3 additions & 0 deletions java/vortex-jni/src/main/java/dev/vortex/api/ArrayStream.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,7 @@ public interface ArrayStream extends AutoCloseable {
* It is an error to call this method if a previous invocation returned false.
*/
boolean next();

@Override
void close();
}
10 changes: 8 additions & 2 deletions java/vortex-jni/src/main/java/dev/vortex/impl/NativeArray.java
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,14 @@ public Array slice(int start, int stop) {

@Override
public boolean getNull(int index) {
// check validity of the array
return false;
checkNotNull(inner, "inner");
return FFI.FFIArray_is_null(inner, index);
}

@Override
public int getNullCount() {
checkNotNull(inner, "inner");
return FFI.FFIArray_null_count(inner);
}

@Override
Expand Down
4 changes: 4 additions & 0 deletions java/vortex-jni/src/main/java/dev/vortex/jni/FFI.java
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,10 @@ public final class FFI {

public static native FFIArray FFIArray_slice(FFIArray array, int start, int stop);

public static native boolean FFIArray_is_null(FFIArray array, int index);

public static native int FFIArray_null_count(FFIArray array);

public static native FFIArray FFIArray_get_field(FFIArray array, int index);

public static native byte FFIArray_get_u8(FFIArray array, int index);
Expand Down
72 changes: 0 additions & 72 deletions java/vortex-jni/src/test/java/dev/vortex/jni/FFITest.java

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

import dev.vortex.api.Array;
import dev.vortex.spark.SparkTypes;
import java.io.IOException;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.sql.vectorized.ColumnVector;
import org.apache.spark.sql.vectorized.ColumnarArray;
Expand All @@ -36,7 +35,7 @@ public VortexColumnVector(Array array) {
public void close() {
try {
array.close();
} catch (IOException e) {
} catch (Exception e) {
throw new RuntimeException("Failed to close Vortex Array", e);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import dev.vortex.api.ScanOptions;
import dev.vortex.impl.NativeFile;
import dev.vortex.spark.VortexFilePartition;
import java.io.IOException;
import java.util.Objects;
import org.apache.spark.sql.connector.read.PartitionReader;
import org.apache.spark.sql.vectorized.ColumnarBatch;
Expand Down Expand Up @@ -72,7 +71,7 @@ void initNativeResources() {
}

@Override
public void close() throws IOException {
public void close() {
checkState(Objects.nonNull(file), "File was closed");
checkState(Objects.nonNull(arrayStream), "ArrayStream was closed");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,10 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

@Disabled
final class VortexScanTest {
private static final Path TPCH_ROOT = Paths.get("/Volumes/Code/vortex/bench-vortex/data/tpch/1/vortex_compressed");

Expand Down
21 changes: 21 additions & 0 deletions vortex-ffi/src/array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,27 @@ pub unsafe extern "C" fn FFIArray_slice(
Box::into_raw(Box::new(FFIArray { inner: sliced }))
}

/// C ABI entry point: reports whether the array element at `index` is null.
///
/// Returns the result of `is_invalid(index)` on the wrapped array; the inner
/// call's fallible result is unwrapped with `vortex_expect`, so a failure
/// aborts via panic rather than returning an error code to the caller.
/// NOTE(review): `index` is widened from `u32` to `usize`; no bounds check is
/// visible here — presumably `is_invalid` errors on out-of-range — confirm.
///
/// # Safety
///
/// `array` must be a valid, non-null pointer to a live `FFIArray` (e.g. one
/// previously returned by this library and not yet freed). Dereferencing an
/// invalid or dangling pointer here is undefined behavior.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn FFIArray_is_null(array: *const FFIArray, index: u32) -> bool {
    // Unchecked deref of the caller-supplied pointer; validity is the
    // caller's contract (see Safety above).
    let array = &*array;
    array
        .inner
        .is_invalid(index as usize)
        .vortex_expect("FFIArray_is_null: is_invalid")
}

/// C ABI entry point: returns the number of null (invalid) elements in the array.
///
/// Delegates to `invalid_count()` on the wrapped array and narrows the result
/// to `u32` with `try_into`. Both the count computation and the narrowing are
/// unwrapped with `vortex_expect`, so either failure — including a count that
/// exceeds `u32::MAX` — aborts via panic instead of returning an error code.
///
/// # Safety
///
/// `array` must be a valid, non-null pointer to a live `FFIArray` (e.g. one
/// previously returned by this library and not yet freed). Dereferencing an
/// invalid or dangling pointer here is undefined behavior.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn FFIArray_null_count(array: *const FFIArray) -> u32 {
    // Unchecked deref of the caller-supplied pointer; validity is the
    // caller's contract (see Safety above).
    let array = &*array;
    array
        .inner
        .as_ref()
        .invalid_count()
        .vortex_expect("FFIArray_null_count: invalid count")
        // Narrow usize -> u32 for the C ABI; panics if the count overflows u32.
        .try_into()
        .vortex_expect("FFIArray_null_count: invalid count to u32")
}

macro_rules! ffiarray_get_ptype {
($ptype:ident) => {
paste::paste! {
Expand Down
5 changes: 2 additions & 3 deletions vortex-ffi/src/dtype.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,8 @@ pub unsafe extern "C" fn DType_new_struct(
for i in 0..len {
let name_ptr = *names.add(i as usize);
let name: Arc<str> = CStr::from_ptr(name_ptr).to_string_lossy().into();
let dtype = (**dtypes.add(i as usize)).clone();
let dtype = Box::from_raw(*dtypes.add(i as usize));
let dtype = *dtype;

rust_names.push(name);
field_dtypes.push(dtype);
Expand Down Expand Up @@ -301,8 +302,6 @@ mod tests {
let dtypes = [name, age];

let person = DType_new_struct(names.as_ptr(), dtypes.as_ptr(), 2, false);
DType_free(name);
DType_free(age);

assert_eq!(DType_get(person), DTYPE_STRUCT);
assert_eq!(DType_field_count(person), 2);
Expand Down
Loading