Use a Deque to avoid StackOverflow in ImmediateJobScheduler (#5077)
Co-authored-by: Ryan Caudy <[email protected]>
nbauernfeind and rcaudy authored Jan 26, 2024
1 parent 2f0355a commit 3a503a9
Showing 7 changed files with 44 additions and 19 deletions.
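For context: the previous ImmediateJobScheduler ran each submitted job inline inside submit(), so a job that submitted further jobs added a stack frame per nested submission, which is presumably how a deep enough chain of dependent jobs hit the StackOverflow named in the title. The new implementation queues submissions on an ArrayDeque and lets only the outermost submit() call drain the queue, so nested submissions execute at a fixed stack depth. Below is a minimal, self-contained sketch of that pattern with invented names; it is not the Deephaven code, and the real class (last file in this diff) additionally guards the drain with an atomic processing-thread check so that only one thread may use a given scheduler instance at a time.

import java.util.ArrayDeque;
import java.util.Deque;

// Simplified illustration of the before/after pattern; not the Deephaven classes.
public final class DequeSchedulerSketch {

    // Before: submit() runs the job immediately, so a job that submits another job
    // recurses one frame deeper per nesting level and can overflow the stack.
    static final class RecursiveScheduler {
        void submit(Runnable job) {
            job.run();
        }
    }

    // After: submit() only enqueues; the frame that started draining runs everything
    // iteratively, so nested submissions finish at a fixed stack depth.
    static final class DequeScheduler {
        private final Deque<Runnable> pendingJobs = new ArrayDeque<>();
        private boolean draining; // single-threaded sketch; the real class uses a CAS on the processing thread

        void submit(Runnable job) {
            pendingJobs.addLast(job);
            if (draining) {
                return; // an ancestor submit() frame is already draining the queue
            }
            draining = true;
            try {
                Runnable next;
                while ((next = pendingJobs.pollLast()) != null) {
                    next.run();
                }
            } finally {
                draining = false;
            }
        }
    }

    public static void main(String[] args) {
        final DequeScheduler scheduler = new DequeScheduler();
        // A chain of 100_000 nested submissions would overflow the stack with
        // RecursiveScheduler, but completes here because each submit() just enqueues.
        final int[] remaining = {100_000};
        final Runnable[] chain = new Runnable[1];
        chain[0] = () -> {
            if (--remaining[0] > 0) {
                scheduler.submit(chain[0]);
            }
        };
        scheduler.submit(chain[0]);
        System.out.println("done, remaining = " + remaining[0]); // prints 0
    }
}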
@@ -38,7 +38,7 @@ class InitialFilterExecution extends AbstractFilterExecution {
if (permitParallelization) {
jobScheduler = new OperationInitializerJobScheduler();
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}
}

@@ -1479,7 +1479,7 @@ this, mode, columns, rowSet, getModifiedColumnSetForUpdates(), publishTheseSourc
&& analyzer.allowCrossColumnParallelization()) {
jobScheduler = new OperationInitializerJobScheduler();
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}

final QueryTable resultTable;
@@ -88,7 +88,7 @@ public void onUpdate(final TableUpdate upstream) {
if (enableParallelUpdate) {
jobScheduler = new UpdateGraphJobScheduler(getUpdateGraph());
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}

analyzer.applyUpdate(acquiredUpdate, toClear, updateHelper, jobScheduler, this,
@@ -260,7 +260,7 @@ private ListenerFilterExecution(
if (permitParallelization) {
jobScheduler = new UpdateGraphJobScheduler(getUpdateGraph());
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}
}

@@ -255,7 +255,7 @@ public Result<QueryTable> initialize(final boolean usePrev, final long beforeClo
if (ExecutionContext.getContext().getOperationInitializer().canParallelize()) {
jobScheduler = new OperationInitializerJobScheduler();
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}

final ExecutionContext executionContext = ExecutionContext.newBuilder()
@@ -302,7 +302,7 @@ class PhasedUpdateProcessor implements LogOutputAppendable {
if (ExecutionContext.getContext().getOperationInitializer().canParallelize()) {
jobScheduler = new OperationInitializerJobScheduler();
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}
executionContext = ExecutionContext.newBuilder()
.markSystemic().build();
@@ -331,7 +331,7 @@ class PhasedUpdateProcessor implements LogOutputAppendable {
if (source.getUpdateGraph().parallelismFactor() > 1) {
jobScheduler = new UpdateGraphJobScheduler(source.getUpdateGraph());
} else {
-jobScheduler = ImmediateJobScheduler.INSTANCE;
+jobScheduler = new ImmediateJobScheduler();
}
executionContext = ExecutionContext.newBuilder()
.setUpdateGraph(result().getUpdateGraph())
@@ -3,30 +3,55 @@
import io.deephaven.base.log.LogOutputAppendable;
import io.deephaven.engine.context.ExecutionContext;
import io.deephaven.engine.table.impl.perf.BasePerformanceEntry;
-import io.deephaven.io.log.impl.LogOutputStringImpl;
import io.deephaven.util.SafeCloseable;
-import io.deephaven.util.process.ProcessEnvironment;

+import java.util.ArrayDeque;
+import java.util.Deque;
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.function.Consumer;

public class ImmediateJobScheduler implements JobScheduler {
-public static final ImmediateJobScheduler INSTANCE = new ImmediateJobScheduler();

+private volatile Thread processingThread;
+private static final AtomicReferenceFieldUpdater<ImmediateJobScheduler, Thread> PROCESSING_THREAD_UPDATER =
+AtomicReferenceFieldUpdater.newUpdater(ImmediateJobScheduler.class, Thread.class, "processingThread");
+
+private final Deque<Runnable> pendingJobs = new ArrayDeque<>();

@Override
public void submit(
final ExecutionContext executionContext,
final Runnable runnable,
final LogOutputAppendable description,
final Consumer<Exception> onError) {
-// We do not need to install the update context since we are not changing thread contexts.
-try (SafeCloseable ignored = executionContext != null ? executionContext.open() : null) {
-runnable.run();
-} catch (Exception e) {
-onError.accept(e);
-} catch (Error e) {
-final String logMessage = new LogOutputStringImpl().append(description).append(" Error").toString();
-ProcessEnvironment.getGlobalFatalErrorReporter().report(logMessage, e);
-throw e;
-}
+final Thread thisThread = Thread.currentThread();
+final boolean thisThreadIsProcessing = processingThread == thisThread;
+
+if (!thisThreadIsProcessing && !PROCESSING_THREAD_UPDATER.compareAndSet(this, null, thisThread)) {
+throw new IllegalCallerException("An unexpected thread submitted a job to this job scheduler");
+}
+
+pendingJobs.addLast(() -> {
+// We do not need to install the update context since we are not changing thread contexts.
+try (SafeCloseable ignored = executionContext != null ? executionContext.open() : null) {
+runnable.run();
+} catch (Exception e) {
+onError.accept(e);
+}
+});
+
+if (thisThreadIsProcessing) {
+// We're already draining the queue in an ancestor stack frame
+return;
+}
+
+try {
+Runnable job;
+while ((job = pendingJobs.pollLast()) != null) {
+job.run();
+}
+} finally {
+PROCESSING_THREAD_UPDATER.set(this, null);
+}
}

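A note on the call-site changes above and on ordering, as a reading of the diff rather than anything stated in the commit message: the scheduler now holds per-drain state (processingThread and pendingJobs), so the shared INSTANCE is removed and each operation constructs its own ImmediateJobScheduler; a second thread submitting to an instance that another thread is draining would fail the compareAndSet and hit the IllegalCallerException guard. Ordering also shifts slightly: a job submitted while another job is running is no longer executed inline but is deferred until the current job returns, then taken from the tail of the deque. A tiny self-contained sketch of that observable behavior, again with invented names:

import java.util.ArrayDeque;
import java.util.Deque;

// Minimal sketch of the deferred, addLast/pollLast drain order; illustrative only.
public final class DeferredSubmitSketch {
    private static final Deque<Runnable> pendingJobs = new ArrayDeque<>();
    private static boolean draining;

    static void submit(Runnable job) {
        pendingJobs.addLast(job);
        if (draining) {
            return; // queued; the outer submit() frame will run it
        }
        draining = true;
        try {
            Runnable next;
            while ((next = pendingJobs.pollLast()) != null) {
                next.run();
            }
        } finally {
            draining = false;
        }
    }

    public static void main(String[] args) {
        submit(() -> {
            System.out.println("outer: start");
            submit(() -> System.out.println("nested: runs after outer returns"));
            System.out.println("outer: end");
        });
        // Prints: "outer: start", "outer: end", "nested: runs after outer returns".
        // The old inline scheduler would have printed the nested line between start and end.
    }
}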
