sTeX relationals and sparql queries
Jazzpirate committed Jun 17, 2023
1 parent 0058af5 commit 80c93da
Showing 8 changed files with 345 additions and 187 deletions.
5 changes: 4 additions & 1 deletion src/mmt-api/src/main/info/kwarc/mmt/api/archives/Index.scala
@@ -124,7 +124,10 @@ abstract class NonTraversingImporter extends BuildTarget with GeneralImporter {
case None => throw LocalError("document path must start with narration base")
}
indexDocument(a, doc,graph)
???
val relFile = (a / relational / docPath).setExtension(RDFStore.fileFormat._1)
log("[ -> relational] " + relFile.getPath)
graph.write(relFile)
graph.close
}

def importDocument(a: Archive, dpath: DPath): Unit = {
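A rough illustration (not from the commit) of where the per-document relational file written above ends up, assuming RDFStore.fileFormat._1 is a plain extension string such as "rdf" (its actual value is collapsed in this view); the archive root and document path below are placeholders:

// Placeholder values for illustration only
// archive root:            /archives/MyArchive
// docPath:                 foo/bar.omdoc   (relative to the narration base)
// RDFStore.fileFormat._1:  "rdf"           (assumed)
// (a / relational / docPath).setExtension(RDFStore.fileFormat._1)
//   ==> /archives/MyArchive/relational/foo/bar.rdf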
76 changes: 64 additions & 12 deletions src/mmt-api/src/main/info/kwarc/mmt/api/ontology/ULO.scala
Expand Up @@ -8,13 +8,15 @@ import org.eclipse.rdf4j.model.{IRI, Resource, Value}
import org.eclipse.rdf4j.model.util.{RDFCollections, Values}
import org.eclipse.rdf4j.model.util.Values.{bnode, iri}
import org.eclipse.rdf4j.model.vocabulary.{DC, OWL, RDF, RDFS, XSD}
import org.eclipse.rdf4j.query.TupleQueryResult
import org.eclipse.rdf4j.repository.RepositoryResult
import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection
import org.eclipse.rdf4j.rio.Rio
import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder
import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries
import org.eclipse.rdf4j.sparqlbuilder.graphpattern.GraphPatterns
import org.eclipse.rdf4j.sparqlbuilder.rdf.RdfPredicate

import java.io.FileOutputStream


@@ -565,7 +567,11 @@ object ULO {
val has_notation_for = new ObjectProperty("has-notation-for")
val has_language = new DatatypeProperty("has-language")
val has_language_module = new ObjectProperty("has-language-module")

val variable = new ULOClass("variable")
.subclassOf(declaration)
val mathstructure = new ULOClass("mathstructure")
.subclassOf(theory)
val mathstructure_of = new ObjectProperty("mathstructure_of")
}

object RDFStore {
@@ -583,6 +589,8 @@ trait SubGraph {
def close: Unit
def write(f: File): Unit

def getAll: Set[ULOStatement]

}
class RDFStore(protected val report : frontend.Report) extends RDFRelStoreLike {
import RDFStore._
@@ -609,7 +617,10 @@ class RDFStore(protected val report : frontend.Report) extends RDFRelStoreLike {
class ISubGraph(conn: SailRepositoryConnection,uri:URI) extends SubGraph {
import scala.jdk.CollectionConverters._
def add(s : ULOStatement) = RDFStore.this.add(s,uri)(conn)
def getAll = conn.getStatements(null,null,null,true,uri).asScala.toSet
def getAllI = conn.getStatements(null,null,null,true,uri).asScala.toSet
def getAll = getAllI.map{s =>
SimpleStatement(s.getSubject,s.getPredicate,s.getObject)
}
conn.clear(uri)
def close = conn.close()
def write(f: File) = {
@@ -618,7 +629,7 @@ class RDFStore(protected val report : frontend.Report) extends RDFRelStoreLike {
val writer = Rio.createWriter(RDFStore.fileFormat._2, out)
try {
writer.startRDF()
getAll.foreach(writer.handleStatement)
getAllI.foreach(writer.handleStatement)
writer.endRDF()
} finally { out.close() }
}
@@ -682,8 +693,9 @@ class RDFStore(protected val report : frontend.Report) extends RDFRelStoreLike {
// Querying

// Wrapper for query results
case class QueryResult(it: RepositoryResult[org.eclipse.rdf4j.model.Statement]) {
case class QueryResult(rdf4j: TupleQueryResult) {
import scala.jdk.CollectionConverters._
/*
def toIterator = it.asScala
def foreach(f: org.eclipse.rdf4j.model.Statement => Unit) = toIterator.foreach(f)
def toRelational: List[RelationalElement] = toIterator.toList.map { statement =>
@@ -709,16 +721,23 @@ class RDFStore(protected val report : frontend.Report) extends RDFRelStoreLike {
)
}
}

*/
def getPaths = {
val name = rdf4j.getBindingNames.get(0)
rdf4j.asScala.map(_.getValue(name)).toList collect {
case iri: IRI if isPath(iri) => iriToPath(iri)
}
}
}

private def get(subject: Option[Path] = None, predicate: Option[ULOTrait] = None, obj: Option[Path] = None, inferred: Boolean = true, context: Option[Path] = None)
= QueryResult(repo.getConnection.getStatements(
subject.map(pathToIri).orNull,
predicate.map(_.toIri).orNull,
obj.map(pathToIri).orNull,
inferred, context.map(pathToIri).toList: _*)
)


def query(q: QUERY) = {
val conn = repo.getConnection
val res = conn.prepareTupleQuery(q.queryString).evaluate()
//conn.close()
QueryResult(res)
}

start
}
@@ -888,4 +907,37 @@ class RelToRDF extends Extension {
}
}
}
}
sealed trait QUERY {
private case class QUNION(q1: QUERY, q2: QUERY) extends QUERY {
def queryString: String = s"{ ${q1.queryString} } UNION { ${q2.queryString} }"
}
def UNION(q: QUERY): QUERY = QUNION(this,q)

def queryString: String
}
object SPARQL {
sealed trait Object {
def toObjString : String
}
sealed trait Subject extends this.Object

private case class PathO(p:Path) extends Subject {
override def toObjString: String = "<" + pathToIri(p).toString + ">"
}
case class V(s:String) extends Subject {
def toObjString = s"?$s"
}
implicit def pathtosubject(p:Path): Subject = PathO(p)
private case class SelectWhere(`var`:String,where:QUERY) extends QUERY {
def queryString: String = s"SELECT ?${`var`} WHERE { ${where.queryString} }"
}
case class SELECT(`var` : String) {
def WHERE(q : QUERY): QUERY = SelectWhere(`var`,q)
}
import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries._
case class T(subject: Subject,predicate: ULOPredicate,`object`:this.Object) extends QUERY {
def queryString: String = s"${subject.toObjString} <${predicate.toIri.toString}> ${`object`.toObjString} ."
}

}
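A minimal sketch of how the new SPARQL DSL composes into a query string, mirroring its use in getSymdocs further down; somePath and controller are placeholders for an arbitrary ContentPath and a frontend.Controller:

import info.kwarc.mmt.api.ontology.SPARQL._

// somePath: ContentPath (placeholder); Path arguments are lifted to Subject via pathtosubject
val q = SELECT("x") WHERE (T(somePath, ULO.docref, V("x")) UNION T(V("x"), ULO.defines, somePath))
// q.queryString is roughly:
//   SELECT ?x WHERE { { <somePath> <ulo:docref> ?x . } UNION { ?x <ulo:defines> <somePath> . } }
val paths = controller.depstore.query(q).getPaths  // keeps only bindings of ?x that are MMT-path IRIs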
@@ -7,12 +7,13 @@ import info.kwarc.mmt.api.frontend.Controller
import info.kwarc.mmt.api.metadata.{HasMetaData, MetaDatum}
import info.kwarc.mmt.api.modules.Theory
import info.kwarc.mmt.api.objects.{Context, OMA, OMFOREIGN, OMID, OMMOD, OMS, OMV, Term, VarDecl}
import info.kwarc.mmt.api.ontology.{Binary, CustomBinary, CustomUnary, DatatypeProperty, ObjectProperty, RelationalElement, RelationalExtractor, ULO, Unary}
import info.kwarc.mmt.api.ontology.{Binary, CustomBinary, CustomUnary, DatatypeProperty, ObjectProperty, RDFImplicits, RelationalElement, RelationalExtractor, ULO, ULOStatement, Unary}
import info.kwarc.mmt.api.opaque.{OpaqueChecker, OpaqueElement, OpaqueElementInterpreter, OpaqueXML}
import info.kwarc.mmt.api.symbols.{Constant, NestedModule}
import info.kwarc.mmt.stex.{SHTML, STeXServer}
import info.kwarc.mmt.stex.rules.{IntLiterals, ModelsOf, RulerRule, StringLiterals}
import info.kwarc.mmt.stex.xhtml.{HTMLNode, HTMLParser, HTMLText}
import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder

import scala.xml.{Node, NodeSeq}

Expand All @@ -27,6 +28,8 @@ object Symbols {
val meta_notation = mmtmeta_path ? "notation"
val meta_symdoc = mmtmeta_path ? "symboldoc"
val meta_example = mmtmeta_path ? "example"
val meta_definition = mmtmeta_path ? "definition"
val meta_statement = mmtmeta_path ? "statement"
val assoctype_sym = mmtmeta_path ? "assoctype"

}
@@ -220,29 +223,76 @@ object SHTMLContentManagement {
}


def addSymdoc(parent: Theory, fors: List[GlobalName], node: Node, language: String) = {
parent.metadata.add(MetaDatum(meta_symdoc, OMA(OMS(meta_symdoc), StringLiterals(language) :: OMFOREIGN(node) :: fors.map(OMS(_)))))
def addSymdoc(path:GlobalName,fors:List[GlobalName],node:Node,controller:Controller,rel:ULOStatement => Unit) = {
val c = Constant(OMMOD(path.module),path.name,Nil,None,Some(OMA(OMS(meta_symdoc),OMFOREIGN(node) :: fors.map(OMS(_)))),Some("symdoc"))
controller.add(c)
rel(ULO.para(RDFImplicits.pathToIri(c.path)))
fors.foreach{f =>
rel(ULO.docref(RDFImplicits.pathToIri(f),RDFImplicits.pathToIri(c.path)))
}
//parent.metadata.add(MetaDatum(meta_symdoc, OMA(OMS(meta_symdoc), StringLiterals(language) :: OMFOREIGN(node) :: fors.map(OMS(_)))))
}

def getSymdocs(sym: ContentPath, language: String)(implicit controller:Controller): List[Node] = {
var ret: List[Path] = Nil
controller.depstore.query(sym, -SymdocRelational.documents)(s => ret ::= s)
val us = ret.flatMap { p =>
controller.getO(p) match {
case Some(t) => t.metadata.getValues(meta_symdoc).flatMap {
case OMA(OMS(`meta_symdoc`), StringLiterals(lang) :: OMFOREIGN(node) :: ls) if ls.contains(OMID(sym)) => Some((lang, node))
case _ => None
}
}
import info.kwarc.mmt.api.ontology.SPARQL._
controller.depstore.query(
SELECT("x") WHERE (T(sym,ULO.docref,V("x")) UNION T(V("x"),ULO.defines,sym))
).getPaths.flatMap(controller.getO).collect {
case c : Constant if c.df.isDefined => c.df.get
}.collect {
case OMA(OMS(_),OMFOREIGN(node) :: _) => node
}
}

def addExample(path:GlobalName,fors:List[GlobalName],node:Node,controller:Controller,rel:ULOStatement => Unit) = {
val c = Constant(OMMOD(path.module), path.name, Nil, None, Some(OMA(OMS(meta_example), OMFOREIGN(node) :: fors.map(OMS(_)))), Some("example"))
controller.add(c)
rel(ULO.example(RDFImplicits.pathToIri(c.path)))
fors.foreach { f =>
rel(ULO.example_for(RDFImplicits.pathToIri(c.path),RDFImplicits.pathToIri(f)))
}
// parent.metadata.add(MetaDatum(meta_example, OMA(OMS(meta_example), OMFOREIGN(node) :: fors.map(OMS(_)))))
}

def getExamples(sym: ContentPath, language: String)(implicit controller: Controller): List[Node] = {
import info.kwarc.mmt.api.ontology.SPARQL._
controller.depstore.query(
SELECT("x") WHERE T(V("x"), ULO.example_for, sym)
).getPaths.flatMap(controller.getO).collect {
case c: Constant if c.df.isDefined => c.df.get
}.collect {
case OMA(OMS(_), OMFOREIGN(node) :: _) => node
}
}

def addDefinition(path: GlobalName, fors: List[GlobalName], node: Node, controller: Controller, rel: ULOStatement => Unit) = {
val c = Constant(OMMOD(path.module), path.name, Nil, None, Some(OMA(OMS(meta_definition), OMFOREIGN(node) :: fors.map(OMS(_)))), Some("definition"))
controller.add(c)
rel(ULO.definition(RDFImplicits.pathToIri(c.path)))
fors.foreach { f =>
rel(ULO.defines(RDFImplicits.pathToIri(c.path), RDFImplicits.pathToIri(f)))
rel(ULO.docref(RDFImplicits.pathToIri(f), RDFImplicits.pathToIri(c.path)))
}
val langs = us.filter(_._1 == language)
val defaults = us.filter(p => p._1 == "en" && !langs.contains(p))
val rest = us.filterNot(p => langs.contains(p) || defaults.contains(p))
(langs ::: defaults ::: rest).map(_._2)
}

def addExample(parent: Theory, fors: List[GlobalName], node: Node) = {
parent.metadata.add(MetaDatum(meta_example, OMA(OMS(meta_example), OMFOREIGN(node) :: fors.map(OMS(_)))))
def getDefinition(sym: ContentPath, language: String)(implicit controller: Controller): List[Node] = {
import info.kwarc.mmt.api.ontology.SPARQL._
controller.depstore.query(
SELECT("x") WHERE T(V("x"), ULO.defines, sym)
).getPaths.flatMap(controller.getO).collect {
case c: Constant if c.df.isDefined => c.df.get
}.collect {
case OMA(OMS(_), OMFOREIGN(node) :: _) => node
}
}

def addStatement(path: GlobalName, fors: List[GlobalName], node: Node, controller: Controller, rel: ULOStatement => Unit) = {
val c = Constant(OMMOD(path.module), path.name, Nil, None, Some(OMA(OMS(meta_statement), OMFOREIGN(node) :: fors.map(OMS(_)))), Some("statement"))
controller.add(c)
rel(ULO.statement(RDFImplicits.pathToIri(c.path)))
fors.foreach { f =>
rel(ULO.docref(RDFImplicits.pathToIri(f), RDFImplicits.pathToIri(c.path)))
}
}

def getRuler(tm: Term, ctx: Context)(implicit controller:Controller) = {
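A rough sketch of how the new add*/get* helpers could be wired together, assuming a SubGraph named graph obtained from the RDFStore and placeholder names defPath, targetSym and node (none of them appear in the commit):

// Placeholders: defPath, targetSym: GlobalName; node: scala.xml.Node; graph: SubGraph
// 1. register a definition constant and emit its relational triples into the subgraph:
SHTMLContentManagement.addDefinition(defPath, List(targetSym), node, controller, st => graph.add(st))
// 2. later, retrieve the XML fragments that define targetSym via the SPARQL-backed depstore:
val definitions: List[scala.xml.Node] = SHTMLContentManagement.getDefinition(targetSym, "en")(controller)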
48 changes: 26 additions & 22 deletions src/mmt-stex/src/info/kwarc/mmt/stex/STeXServer.scala
@@ -34,6 +34,8 @@ class STeXServer extends ServerExtension("sTeX") with OMDocSHTMLRules with SHTML
case al:ArchiveLike if al.id == id => al
}

private val do_jupyter = false

override def start(args: List[String]): Unit = {
super.start(args)
controller.extman.addExtension(NotationExtractor)
@@ -45,31 +47,33 @@ class STeXServer extends ServerExtension("sTeX") with OMDocSHTMLRules with SHTML
addExtension(FragmentExtension)
addExtension(BrowserExtension)*/

val index = RusTeX.mh.up / "meta" / "inf" / "courses.json"
if (index.exists()) Try(JSON.parse(File.read(index))).toOption match {
case Some(o:JSONObject) =>
o.foreach {
case (JSONString(id),jo:JSONObject) =>
val map = mutable.HashMap.empty[String, String]
map("id") = id
jo.foreach {
case (JSONString(key),JSONString(value)) =>
map(key) = value
case _ =>
}
map.get("type") match {
case Some("jupyterbook") =>
val store = new JupyterBookArchive(controller, map)
controller.backend.addStore(store)
//val (t,_) = Time.measure {
if (do_jupyter) {
val index = RusTeX.mh.up / "meta" / "inf" / "courses.json"
if (index.exists()) Try(JSON.parse(File.read(index))).toOption match {
case Some(o: JSONObject) =>
o.foreach {
case (JSONString(id), jo: JSONObject) =>
val map = mutable.HashMap.empty[String, String]
map("id") = id
jo.foreach {
case (JSONString(key), JSONString(value)) =>
map(key) = value
case _ =>
}
map.get("type") match {
case Some("jupyterbook") =>
val store = new JupyterBookArchive(controller, map)
controller.backend.addStore(store)
//val (t,_) = Time.measure {
store.importAll
//}
//println("Takes: " + t)
case _ =>
}
case _ =>
}
case _ =>
case _ =>
}
case _ =>
}
case _ =>
}
}


