[backport to 2.x] backport PR 1329 #1330

Merged
merged 1 commit on Oct 2, 2024
9 changes: 4 additions & 5 deletions build.gradle
@@ -339,12 +339,15 @@ integTest {
filter {
includeTestsMatching "org.opensearch.ad.rest.*IT"
includeTestsMatching "org.opensearch.ad.e2e.*IT"
includeTestsMatching "org.opensearch.forecast.rest.*IT"
includeTestsMatching "org.opensearch.forecast.e2e.*IT"
}
}

if (System.getProperty("https") == null || System.getProperty("https") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.rest.SecureADRestIT"
excludeTestsMatching "org.opensearch.forecast.rest.SecureForecastRestIT"
}
}

@@ -468,6 +471,7 @@ task integTestRemote(type: RestIntegTestTask) {
if (System.getProperty("https") == null || System.getProperty("https") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.rest.SecureADRestIT"
excludeTestsMatching "org.opensearch.forecast.rest.SecureForecastRestIT"
}
}
}
@@ -696,10 +700,7 @@ List<String> jacocoExclusions = [

// TODO: add test coverage (kaituo)
'org.opensearch.forecast.*',
'org.opensearch.timeseries.ml.TimeSeriesSingleStreamCheckpointDao',
'org.opensearch.timeseries.transport.JobRequest',
'org.opensearch.timeseries.transport.handler.ResultBulkIndexingHandler',
'org.opensearch.timeseries.ml.Inferencer',
'org.opensearch.timeseries.transport.SingleStreamResultRequest',
'org.opensearch.timeseries.rest.handler.IndexJobActionHandler.1',
'org.opensearch.timeseries.transport.SuggestConfigParamResponse',
@@ -727,8 +728,6 @@ List<String> jacocoExclusions = [
'org.opensearch.timeseries.ratelimit.RateLimitedRequestWorker',
'org.opensearch.timeseries.util.TimeUtil',
'org.opensearch.ad.transport.ADHCImputeTransportAction',
'org.opensearch.timeseries.ml.RealTimeInferencer',
'org.opensearch.timeseries.util.ExpiringValue',
]


@@ -71,7 +71,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
all,
RestHandlerUtils.buildEntity(request, forecasterId)
);

return channel -> client.execute(GetForecasterAction.INSTANCE, getForecasterRequest, new RestToXContentListener<>(channel));
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(Encode.forHtml(e.getMessage()));
@@ -467,7 +467,6 @@ private QueryBuilder generateBuildInSubFilter(SearchTopForecastResultRequest req
*/
private RangeQueryBuilder generateDateFilter(SearchTopForecastResultRequest request, Forecaster forecaster) {
// forecast from is data end time for forecast
// return QueryBuilders.termQuery(CommonName.DATA_END_TIME_FIELD, request.getForecastFrom().toEpochMilli());
long startInclusive = request.getForecastFrom().toEpochMilli();
long endExclusive = startInclusive + forecaster.getIntervalInMilliseconds();
return QueryBuilders.rangeQuery(CommonName.DATA_END_TIME_FIELD).gte(startInclusive).lt(endExclusive);
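
As a side note on the change above: the exact-match term query on data_end_time is replaced with a half-open range covering exactly one forecaster interval. A minimal sketch of the selected window follows; the timestamps and the 10-minute interval are illustrative values, not taken from the PR.

```java
public class ForecastWindowSketch {
    public static void main(String[] args) {
        // forecast-from is the data_end_time of the bucket being forecast, so the
        // filter selects [forecastFrom, forecastFrom + interval) instead of an exact match.
        long forecastFrom = 1727827200000L;                // e.g. 2024-10-02T00:00:00Z
        long intervalMillis = 10 * 60 * 1000L;             // assumed 10-minute forecaster interval
        long startInclusive = forecastFrom;                 // .gte(startInclusive)
        long endExclusive = forecastFrom + intervalMillis;  // .lt(endExclusive)
        System.out.println(startInclusive + " <= data_end_time < " + endExclusive);
    }
}
```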
@@ -120,7 +120,7 @@ public boolean process(Sample sample, ModelState<RCFModelType> modelState, Confi
return processWithTimeout(modelState, config, taskId, sample);
}

private boolean processWithTimeout(ModelState<RCFModelType> modelState, Config config, String taskId, Sample sample) {
public boolean processWithTimeout(ModelState<RCFModelType> modelState, Config config, String taskId, Sample sample) {
String modelId = modelState.getModelId();
ReentrantLock lock = (ReentrantLock) modelLocks
.computeIfAbsent(
@@ -175,7 +175,7 @@ private boolean processWithTimeout(ModelState<RCFModelType> modelState, Config c
return success;
}

private boolean tryProcess(Sample sample, ModelState<RCFModelType> modelState, Config config, String taskId, long curExecutionEnd) {
public boolean tryProcess(Sample sample, ModelState<RCFModelType> modelState, Config config, String taskId, long curExecutionEnd) {
String modelId = modelState.getModelId();
try {
RCFResultType result = modelManager.getResult(sample, modelState, modelId, config, taskId);
@@ -215,7 +215,7 @@ private boolean tryProcess(Sample sample, ModelState<RCFModelType> modelState, C
return true;
}

private void reColdStart(Config config, String modelId, Exception e, Sample sample, String taskId) {
public void reColdStart(Config config, String modelId, Exception e, Sample sample, String taskId) {
// fail to score likely due to model corruption. Re-cold start to recover.
LOG.error(new ParameterizedMessage("Likely model corruption for [{}]", modelId), e);
stats.getStat(modelCorruptionStat).increment();
@@ -255,6 +255,13 @@ public void maintenance() {
// will be thrown to transport broadcast handler
throw new TimeSeriesException("Fail to maintain RealTimeInferencer", e);
}
}

public Map<String, ExpiringValue<Lock>> getModelLocks() {
return modelLocks;
}

public Map<String, ExpiringValue<PriorityQueue<Sample>>> getSampleQueues() {
return sampleQueues;
}
}
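
The methods above are widened from private to public and two getters expose the lock and sample-queue maps, which reads like a change made for test access. For reference, a self-contained sketch of the per-model locking pattern that processWithTimeout relies on; the class, method names, and 1-second timeout are illustrative, not the plugin's actual code.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

public class PerModelLockSketch {
    // one lock per model id, created lazily on first use
    private final Map<String, ReentrantLock> locks = new ConcurrentHashMap<>();

    public boolean withLock(String modelId, Runnable work) throws InterruptedException {
        ReentrantLock lock = locks.computeIfAbsent(modelId, k -> new ReentrantLock());
        // bounded wait so one stuck model cannot block scoring of the others
        if (!lock.tryLock(1, TimeUnit.SECONDS)) {
            return false;
        }
        try {
            work.run();
            return true;
        } finally {
            lock.unlock();
        }
    }
}
```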

This file was deleted.

@@ -44,6 +44,7 @@
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.index.IndexResponse;
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.support.WriteRequest;
@@ -596,6 +597,9 @@ public <T> void getAndExecuteOnLatestTasks(
}, e -> {
if (e instanceof IndexNotFoundException) {
function.accept(new ArrayList<>());
} else if (e instanceof SearchPhaseExecutionException && e.getMessage().contains("No mapping found for")) {
// state index hasn't finished initialization
function.accept(new ArrayList<>());
} else {
logger.error("Failed to search task for config " + configId, e);
listener.onFailure(e);
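
The new branch treats a SearchPhaseExecutionException whose message contains "No mapping found for" the same as a missing index: the state index exists but its mappings have not finished initializing, so there are simply no tasks to return yet. A hypothetical helper capturing that classification, not part of the PR, could look like this:

```java
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.index.IndexNotFoundException;

final class TaskSearchFailures {
    private TaskSearchFailures() {}

    // true when the failure only means "no task data yet", so callers can fall back
    // to an empty result list instead of propagating an error
    static boolean meansNoTasksYet(Exception e) {
        if (e instanceof IndexNotFoundException) {
            return true; // state index has not been created
        }
        // index exists but its mappings are still being applied
        return e instanceof SearchPhaseExecutionException
            && e.getMessage() != null
            && e.getMessage().contains("No mapping found for");
    }
}
```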
@@ -48,7 +48,6 @@

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonArray;

public abstract class AnomalyDetectorRestTestCase extends ODFERestTestCase {
public static final Logger LOG = (Logger) LogManager.getLogger(AnomalyDetectorRestTestCase.class);
@@ -390,52 +389,6 @@ public Response searchTopAnomalyResults(String detectorId, boolean historical, S
);
}

public Response createUser(String name, String password, ArrayList<String> backendRoles) throws IOException {
JsonArray backendRolesString = new JsonArray();
for (int i = 0; i < backendRoles.size(); i++) {
backendRolesString.add(backendRoles.get(i));
}
return TestHelpers
.makeRequest(
client(),
"PUT",
"/_opendistro/_security/api/internalusers/" + name,
null,
TestHelpers
.toHttpEntity(
" {\n"
+ "\"password\": \""
+ password
+ "\",\n"
+ "\"backend_roles\": "
+ backendRolesString
+ ",\n"
+ "\"attributes\": {\n"
+ "}} "
),
ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
);
}

public Response createRoleMapping(String role, ArrayList<String> users) throws IOException {
JsonArray usersString = new JsonArray();
for (int i = 0; i < users.size(); i++) {
usersString.add(users.get(i));
}
return TestHelpers
.makeRequest(
client(),
"PUT",
"/_opendistro/_security/api/rolesmapping/" + role,
null,
TestHelpers
.toHttpEntity(
"{\n" + " \"backend_roles\" : [ ],\n" + " \"hosts\" : [ ],\n" + " \"users\" : " + usersString + "\n" + "}"
),
ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
);
}

public Response createIndexRole(String role, String index) throws IOException {
return TestHelpers
.makeRequest(
@@ -554,18 +507,6 @@ public Response createDlsRole(String role, String index) throws IOException {
);
}

public Response deleteUser(String user) throws IOException {
return TestHelpers
.makeRequest(
client(),
"DELETE",
"/_opendistro/_security/api/internalusers/" + user,
null,
"",
ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
);
}

public Response deleteRoleMapping(String user) throws IOException {
return TestHelpers
.makeRequest(
@@ -259,7 +259,11 @@ && scoreOneResult(
verifyConfidence(testIndex, confidence, lastConfidence.get(entity));
lastConfidence.put(entity, confidence);
} else {
assertEquals(null, imputed0);
assertEquals(
String.format(Locale.ROOT, "dataStartTime: %d, missingTimestamps: %s", dataStartTime, missingTimestamps),
null,
imputed0
);
}

lastSeen.put(entity, dataValue);
@@ -27,7 +27,7 @@
import com.google.gson.JsonParser;

public abstract class AbstractRuleTestCase extends AbstractADSyntheticDataTest {
String categoricalField = "componentName";
String categoricalField = "cityName";

/**
* Ingest all of the data in file datasetName and create detector
@@ -97,7 +97,7 @@ protected String genDetector(String datasetName, int intervalMinutes, int trainT
Locale.ROOT,
"{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
+ ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
+ "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"transform._doc_count\" } } } }"
+ "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"visitCount\" } } } }"
+ "], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
+ "\"category_field\": [\"%s\"], "
+ "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
@@ -140,7 +140,7 @@ protected TrainResult ingestTrainData(
"{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\":"
+ (useDateNanos ? "\"date_nanos\"" : "\"date\"")
+ "},"
+ " \"transform._doc_count\": { \"type\": \"integer\" },"
+ " \"visitCount\": { \"type\": \"integer\" },"
+ "\"%s\": { \"type\": \"keyword\"} } } }",
categoricalField
);
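
With the mapping above switched to a visitCount integer field and the category field renamed to cityName, a hypothetical document for this test index would look like the following; the values are made up for illustration only.

```java
public class SampleRuleDoc {
    public static void main(String[] args) {
        // timestamp: date (or date_nanos), visitCount: integer feature field,
        // cityName: keyword used as the detector's category field
        String doc = "{"
            + "\"timestamp\": \"2024-10-02T00:00:00Z\","
            + "\"visitCount\": 42,"
            + "\"cityName\": \"phoenix\""
            + "}";
        System.out.println(doc);
    }
}
```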
2 changes: 1 addition & 1 deletion src/test/java/org/opensearch/ad/e2e/MissingIT.java
@@ -29,7 +29,7 @@ public abstract class MissingIT extends AbstractADSyntheticDataTest {

protected int intervalMinutes = 10;
public long intervalMillis = intervalMinutes * 60000L;
protected String categoricalField = "componentName";
protected String categoricalField = "cityName";
protected int maxError = 20;
protected int trainTestSplit = 100;

@@ -69,7 +69,7 @@ public void testTwoFeatureSparse() throws Exception {
+ " \"aggregations\": {\n"
+ " \"max1\": {\n"
+ " \"max\": {\n"
+ " \"field\": \"transform._doc_count\"\n"
+ " \"field\": \"visitCount\"\n"
+ " }\n"
+ " }\n"
+ " }\n"
@@ -99,7 +99,7 @@ public void testTwoFeatureSparse() throws Exception {
+ " \"aggregations\": {\n"
+ " \"max2\": {\n"
+ " \"max\": {\n"
+ " \"field\": \"transform._doc_count\"\n"
+ " \"field\": \"visitCount\"\n"
+ " }\n"
+ " }\n"
+ " }\n"