
Comparing changes

base repository: rhizomik/rhizomerAPI
base: 0.2.1
head repository: rhizomik/rhizomerAPI
compare: master
  • 8 commits
  • 6 files changed
  • 1 contributor

Commits on Sep 20, 2022

  1. 3d88be7 by rogargon (Roberto García); verified signature (key has expired).

Commits on Sep 21, 2022

  1. c844e94 by rogargon (Roberto García); verified signature (key has expired).

Commits on Sep 22, 2022

  1. 0fb9934 by rogargon (Roberto García); verified signature (key has expired).
  2. edbc2e5 by rogargon (Roberto García); verified signature (key has expired).

Commits on Sep 27, 2022

  1. 4f2f7f2 by rogargon (Roberto García); verified signature (key has expired).

Commits on Nov 5, 2022

  1. 45aa8b8 by rogargon (Roberto García); verified signature (key has expired).

Commits on Nov 13, 2022

  1. db21b97 by rogargon (Roberto García); verified signature (key has expired).
  2. API method to retrieve individual facet values
     Used for those filtered but not present in the 10 most common.
     5f31691 by rogargon (Roberto García); committed Nov 13, 2022; verified signature (key has expired).
@@ -74,6 +74,22 @@ public class RangeController {
         return analiseDataset.retrieveRangeValues(dataset, facetRange, filters, page, size);
     }
 
+    @RequestMapping(method = RequestMethod.GET,
+        value = "/datasets/{datasetId}/classes/{classCurie}/facets/{facetCurie}/ranges/{rangeCurie}")
+    @ResponseStatus(HttpStatus.OK)
+    public @ResponseBody Value getRangeValue(@PathVariable String datasetId, @PathVariable String classCurie,
+                                             @PathVariable String facetCurie, @PathVariable String rangeCurie,
+                                             @RequestParam MultiValueMap<String, String> filters, Authentication auth,
+                                             @RequestParam(value="value") String value) {
+        Dataset dataset = getDataset(datasetId);
+        securityController.checkPublicOrOwner(dataset, auth);
+        Class datasetClass = getClass(classCurie, dataset);
+        Facet classFacet = getFacet(facetCurie, datasetClass.getId());
+        Range facetRange = getRange(rangeCurie, classFacet);
+        filters.remove("value");
+        return analiseDataset.retrieveFacetRangeValueLabelAndCount(dataset, facetRange, value, filters);
+    }
+
     @RequestMapping(method = RequestMethod.GET,
         value = "/datasets/{datasetId}/classes/{classCurie}/facets/{facetCurie}/ranges/{rangeCurie}/valuesContaining")
     public @ResponseBody List<Value> getRangeValuesContaining(@PathVariable String datasetId,
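For illustration, the new getRangeValue endpoint above can be exercised with a plain HTTP GET on the path it maps, passing the selected facet value in the "value" request parameter. This is a minimal sketch, not part of this change set; the host, port, dataset id, curies and value are hypothetical placeholders.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class GetRangeValueExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical deployment and identifiers; adjust to an actual rhizomerAPI instance.
        String base = "http://localhost:8080";
        String path = "/datasets/dbpedia/classes/dbo:Film/facets/dbo:director/ranges/dbo:Person";
        // The "value" request parameter selects the individual facet value to look up.
        String value = URLEncoder.encode("http://dbpedia.org/resource/Stanley_Kubrick", StandardCharsets.UTF_8);

        HttpRequest request = HttpRequest.newBuilder(URI.create(base + path + "?value=" + value))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // Expected to return a single Value (label and count) for the requested facet value,
        // even when it is not among the 10 most common values returned by the paged endpoint.
        System.out.println(response.body());
    }
}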
src/main/java/net/rhizomik/rhizomer/model/SPARQLEndPoint.java (2 changes: 2 additions & 0 deletions)
@@ -37,6 +37,8 @@ public class SPARQLEndPoint {
     private ServerType type = ServerType.GENERIC;
     public enum ServerType {
         GENERIC,
+        FUSEKI,
+        FUSEKI_LUCENE,
         VIRTUOSO,
         MARKLOGIC,
         STARDOG;
src/main/java/net/rhizomik/rhizomer/service/AnalizeDataset.java (74 changes: 59 additions & 15 deletions)
@@ -180,9 +180,10 @@ public List<Value> retrieveRangeValues(Dataset dataset, Range facetRange,
         List<Value> rangeValues = new ArrayList<>();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
-                queries(dataset).getQueryFacetRangeValues(classUri.toString(), facetUri.toString(),
-                    facetRange.getUri().toString(), filters, facetRange.getAllLiteral(),
-                    size, size * page, true), endPoint.getGraphs(), endPoint.getOntologyGraphs(),
+                queries(dataset).getQueryFacetRangeValues(endPoint.getType(), classUri.toString(),
+                    facetUri.toString(), facetRange.getUri().toString(), filters, facetRange.getAllLiteral(),
+                    size, size * page, true),
+                endPoint.getGraphs(), endPoint.getOntologyGraphs(),
                 withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             while (result.hasNext()) {
                 QuerySolution soln = result.nextSolution();
@@ -211,15 +212,54 @@ public List<Value> retrieveRangeValues(Dataset dataset, Range facetRange,
         return rangeValues;
     }
 
+    public Value retrieveFacetRangeValueLabelAndCount(
+        Dataset dataset, Range facetRange, String rangeValue, MultiValueMap<String, String> filters) {
+        URI classUri = facetRange.getFacet().getDomain().getUri();
+        URI facetUri = facetRange.getFacet().getUri();
+        Value resultValue = null;
+        SPARQLEndPoint endPoint = endPointRepository.findByDataset(dataset).get(0);
+        ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
+            queries(dataset).getFacetRangeValueLabelAndCount(
+                endPoint.getType(), classUri.toString(), facetUri.toString(), facetRange.getUri().toString(),
+                rangeValue, filters, facetRange.getAllLiteral()),
+            endPoint.getGraphs(), endPoint.getOntologyGraphs(),
+            withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
+        if (result.hasNext()) {
+            QuerySolution soln = result.nextSolution();
+            if (soln.contains("?value")) {
+                RDFNode value = soln.get("?value");
+                int count = soln.getLiteral("?count").getInt();
+                String label = null;
+                if (soln.contains("?label") && !soln.getLiteral("?label").getString().isBlank())
+                    label = soln.getLiteral("?label").getString();
+                String uri = null;
+                if (value.isResource())
+                    uri = value.asResource().getURI();
+                String curie = null;
+                if (uri != null)
+                    try {
+                        curie = prefixCCMap.abbreviate(new URL(uri).toString());
+                    } catch (Exception ignored) {
+                    }
+                if (value.isLiteral())
+                    resultValue = new Value(value.asLiteral().getString(), count, uri, curie, label);
+                else
+                    resultValue = new Value(value.toString(), count, uri, curie, label);
+            }
+        }
+        return resultValue;
+    }
+
     public List<Value> retrieveRangeValuesContaining(Dataset dataset, Range facetRange,
         MultiValueMap<String, String> filters, String containing, int top, String lang) {
         URI classUri = facetRange.getFacet().getDomain().getUri();
         URI facetUri = facetRange.getFacet().getUri();
         List<Value> rangeValues = new ArrayList<>();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
-                queries(dataset).getQueryFacetRangeValuesContaining(classUri.toString(), facetUri.toString(),
-                    facetRange.getUri().toString(), filters, facetRange.getAllLiteral(), containing, top, lang),
+                queries(dataset).getQueryFacetRangeValuesContaining(
+                    endPoint.getType(), classUri.toString(), facetUri.toString(),
+                    facetRange.getUri().toString(), filters, facetRange.getAllLiteral(), containing, top, lang),
                 endPoint.getGraphs(), withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             while (result.hasNext()) {
                 QuerySolution soln = result.nextSolution();
@@ -253,8 +293,9 @@ public Range retrieveRangeMinMax(Dataset dataset, Range facetRange,
         URI facetUri = facetRange.getFacet().getUri();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
-                queries(dataset).getQueryFacetRangeMinMax(classUri.toString(), facetUri.toString(),
-                    facetRange.getUri().toString(), filters), endPoint.getGraphs(), endPoint.getOntologyGraphs(),
+                queries(dataset).getQueryFacetRangeMinMax(endPoint.getType(), classUri.toString(), facetUri.toString(),
+                    facetRange.getUri().toString(), filters),
+                endPoint.getGraphs(), endPoint.getOntologyGraphs(),
                 withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             while (result.hasNext()) {
                 QuerySolution soln = result.nextSolution();
@@ -291,8 +332,9 @@ public void retrieveClassDescriptions(OutputStream out, Dataset dataset, Class d
         URI classUri = datasetClass.getUri();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             Model model = sparqlService.queryDescribe(endPoint, endPoint.getTimeout(),
-                queries(dataset).getQueryClassDescriptions(classUri.toString(), filters, size,size * page),
-                endPoint.getGraphs(), withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
+                queries(dataset).getQueryClassDescriptions(endPoint.getType(), classUri.toString(),
+                    filters, size,size * page),
+                endPoint.getGraphs(), withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             RDFDataMgr.write(out, model, format);
         });
     }
@@ -302,7 +344,8 @@ public void retrieveClassInstances(OutputStream out, Dataset dataset, Class data
         URI classUri = datasetClass.getUri();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             Model model = sparqlService.queryConstruct(endPoint, endPoint.getTimeout(),
-                queries(dataset).getQueryClassInstances(classUri.toString(), filters, size,size * page),
+                queries(dataset).getQueryClassInstances(endPoint.getType(), classUri.toString(),
+                    filters, size,size * page),
                 endPoint.getGraphs(), endPoint.getOntologyGraphs(), withCreds(endPoint.getQueryUsername(),
                     endPoint.getQueryPassword()));
             RDFDataMgr.write(out, model, format);
@@ -313,7 +356,7 @@ public int retrieveSearchInstancesCount(Dataset dataset, String text) {
         AtomicInteger count = new AtomicInteger();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
-                queries(dataset).getQuerySearchInstancesCount(text),
+                queries(dataset).getQuerySearchInstancesCount(endPoint.getType(), text),
                 endPoint.getGraphs(), withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             while (result.hasNext()) {
                 QuerySolution soln = result.nextSolution();
@@ -327,7 +370,7 @@ public int retrieveSearchInstancesCount(Dataset dataset, String text) {
     public void searchInstances(OutputStream out, Dataset dataset, String text, int size, RDFFormat format) {
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             Model model = sparqlService.queryConstruct(endPoint, endPoint.getTimeout(),
-                queries(dataset).getQuerySearchInstances(text, size),
+                queries(dataset).getQuerySearchInstances(endPoint.getType(), text, size),
                 endPoint.getGraphs(), endPoint.getOntologyGraphs(), withCreds(endPoint.getQueryUsername(),
                     endPoint.getQueryPassword()));
             RDFDataMgr.write(out, model, format);
@@ -338,7 +381,7 @@ public List<Value> searchInstancesTypeFacetValues(Dataset dataset, String text,
         List<Value> rangeValues = new ArrayList<>();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
-                queries(dataset).getQuerySearchTypeFacet(text, size, size * page, true),
+                queries(dataset).getQuerySearchTypeFacet(endPoint.getType(), text, size, size * page, true),
                 endPoint.getGraphs(), endPoint.getOntologyGraphs(),
                 withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             while (result.hasNext()) {
@@ -373,7 +416,8 @@ public void getLinkedResourcesLabels(OutputStream out, Dataset dataset, Class da
         URI classUri = datasetClass.getUri();
        endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             Model model = sparqlService.queryConstruct(endPoint, endPoint.getTimeout(),
-                queries(dataset).getQueryClassInstancesLabels(classUri.toString(), filters, size,size * page),
+                queries(dataset).getQueryClassInstancesLabels(endPoint.getType(), classUri.toString(),
+                    filters, size,size * page),
                 endPoint.getGraphs(), endPoint.getOntologyGraphs(), withCreds(endPoint.getQueryUsername(),
                     endPoint.getQueryPassword()));
             RDFDataMgr.write(out, model, format);
@@ -385,7 +429,7 @@ public int retrieveClassInstancesCount(Dataset dataset, Class datasetClass, Mult
         AtomicInteger count = new AtomicInteger();
         endPointRepository.findByDataset(dataset).forEach(endPoint -> {
             ResultSet result = sparqlService.querySelect(endPoint.getQueryEndPoint(), endPoint.getTimeout(),
-                queries(dataset).getQueryClassInstancesCount(classUri.toString(), filters),
+                queries(dataset).getQueryClassInstancesCount(endPoint.getType(), classUri.toString(), filters),
                 endPoint.getGraphs(), withCreds(endPoint.getQueryUsername(), endPoint.getQueryPassword()));
             while (result.hasNext()) {
                 QuerySolution soln = result.nextSolution();