diff --git a/.gitignore b/.gitignore
index ac5e28967f..9f180d8688 100644
--- a/.gitignore
+++ b/.gitignore
@@ -27,6 +27,8 @@ scripts/travis/deploy_key
# IntelliJ Failes
.idea
.idea_modules
+*.iml
+
# Eclipse Files
bin
diff --git a/Test/build.sbt b/Test/build.sbt
new file mode 100644
index 0000000000..aa5b0b6927
--- /dev/null
+++ b/Test/build.sbt
@@ -0,0 +1,12 @@
+import sbt.Keys.libraryDependencies
+
+lazy val mmt = RootProject(file("C:/mmt2/MMT/src"))
+
+lazy val playground = Project(id = "playground", base = file(".")).settings(
+ name := "playground",
+ version := "0.1",
+ scalaVersion := "2.12.8",
+ scalacOptions in ThisBuild ++= Seq("-unchecked", "-deprecation"),
+ // Add further desired libraryDependencies here
+ // e.g. libraryDependencies += "org.jgrapht" % "jgrapht-core" % "1.3.0",
+).dependsOn(mmt).aggregate(mmt)
\ No newline at end of file
diff --git a/Test/project/build.properties b/Test/project/build.properties
new file mode 100644
index 0000000000..7609b47819
--- /dev/null
+++ b/Test/project/build.properties
@@ -0,0 +1 @@
+sbt.version = 1.2.8
\ No newline at end of file
diff --git a/src/Semantic-Computer/Semantic-Computer.iml b/src/Semantic-Computer/Semantic-Computer.iml
new file mode 100644
index 0000000000..d534117121
--- /dev/null
+++ b/src/Semantic-Computer/Semantic-Computer.iml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/build.sbt b/src/build.sbt
index 26cb2ed9e0..e5bcb03d55 100644
--- a/src/build.sbt
+++ b/src/build.sbt
@@ -41,7 +41,7 @@ lazy val mmtMainClass = "info.kwarc.mmt.api.frontend.Run"
// =================================
// GLOBAL SETTINGS
// =================================
-scalaVersion in Global := "2.12.3"
+scalaVersion in Global := "2.12.8"
scalacOptions in Global := Seq(
"-feature", "-language:postfixOps", "-language:implicitConversions", "-deprecation",
"-Xmax-classfile-name", "128", // fix long classnames on weird filesystems
@@ -216,7 +216,7 @@ lazy val lf = (project in file("mmt-lf")).
dependsOn(lfcatalog).
settings(mmtProjectsSettings("mmt-lf"): _*).
settings(
- // libraryDependencies += "org.scala-lang" % "scala-parser-combinators" % "2.12.3" % "test",
+ // libraryDependencies += "org.scala-lang" % "scala-parser-combinators" % "2.12.8" % "test",
)
// =================================
diff --git a/src/frameit-mmt/src/FrameIT.scala b/src/frameit-mmt/src/FrameIT.scala
index e167910264..1300911784 100644
--- a/src/frameit-mmt/src/FrameIT.scala
+++ b/src/frameit-mmt/src/FrameIT.scala
@@ -78,7 +78,6 @@ class FrameViewer extends Extension {
class FrameitPlugin extends ServerExtension("frameit") with Logger with MMTTask {
override val logPrefix = "frameit"
- val test : Class[FrameViewer] = classOf[FrameViewer]
lazy val fv = controller.extman.get(classOf[FrameViewer]).headOption.getOrElse {
val a = new FrameViewer
controller.extman.addExtension(a)
@@ -199,7 +198,7 @@ class FrameitPlugin extends ServerExtension("frameit") with Logger with MMTTask
controller.extman.addExtension(ret)
ret
}
- implicit val ce : CheckingEnvironment = new CheckingEnvironment(controller.simplifier,ErrorThrower,RelationHandler.ignore, this)
+ implicit lazy val ce : CheckingEnvironment = new CheckingEnvironment(controller.simplifier,ErrorThrower,RelationHandler.ignore, this)
val dpath = DPath(URI.http colon "cds.omdoc.org") / "FrameIT"
val sitpath = dpath ? "situation_theory"
diff --git a/src/latex-mmt/paper/conc.tex b/src/latex-mmt/paper/conc.tex
index 79e099d6ae..ee2abb8884 100644
--- a/src/latex-mmt/paper/conc.tex
+++ b/src/latex-mmt/paper/conc.tex
@@ -1,6 +1,6 @@
We have presented a system that allows embedding formal \mmt content inside \latex documents.
The formal content is type-checked in a way that does not affect any existing \latex work flows and results in higher quality \latex than could be easily produced manually.
-Moreover, the formal content may use and be used by any \mmt content formalized elsewhere, which allows the interlinking across document formats.
+Moreover, the formal content may use and be used by any \mmt content formalized elsewhere, which allows interlinking across document formats.
Of course, we are not able to verify the informal parts of a document this way --- only those formulas that are written in \mmt syntax are checked.
But our design supports both gradual formalization and parallel formal-informal representations.
@@ -11,4 +11,4 @@
It is intriguing to apply the same technology to formal proofs.
This is already possible for formal proof terms, but those often bear little resemblance to informal proofs.
Once \mmt supports a language for structured proofs, that could be used to write formal proofs in \mmttex.
-Morever, future work could apply \mmt as a middleware between \latex and other tools, e.g., \mmt could run a computation through a computer algebra to verify a computation in \latex.
+Moreover, future work could apply \mmt as a middleware between \latex and other tools, e.g., \mmt could run a computation through a computer algebra system to verify a computation in \latex.
diff --git a/src/latex-mmt/paper/copyright.pdf b/src/latex-mmt/paper/copyright.pdf
new file mode 100644
index 0000000000..16bcbfde42
Binary files /dev/null and b/src/latex-mmt/paper/copyright.pdf differ
diff --git a/src/latex-mmt/paper/intro.tex b/src/latex-mmt/paper/intro.tex
index 1ba92bbb3f..d9ade3c20e 100644
--- a/src/latex-mmt/paper/intro.tex
+++ b/src/latex-mmt/paper/intro.tex
@@ -3,10 +3,14 @@
A major open problem in mathematical document authoring is to elegantly combine formal and informal mathematical knowledge.
Multiple proof assistants and controlled natural language systems have developed custom formats for that purpose, e.g., \cite{isabelle_documentoriented,mizar,plato,naproche}.
+Other languages $L$ allow for integrating \latex into $L$-source files in a literate programming style (e.g., lhs2tex\footnote{\url{https://www.andres-loeh.de/lhs2tex/}} for Haskell) or for integrating $L$-source chunks into \latex files (e.g., SageTeX\footnote{\url{https://github.com/sagemath/sagetex}} for SageMath).
+The design of \mmttex is close to the latter, i.e., to combine \mmt and \latex sources.
+
%Mizar's document format \cite{mizar} has always been designed to closely resemble common narrative mathematical language.
%Recent work for Isabelle opens up the Isabelle kernel in order to permit better integration with other applications, in particular document-oriented editors\cite{isabelle_documentoriented}.
%%\cite{largeformalwikis} develops a general Wiki infrastructure that can use Mizar and Coq as verification backends.
-%In the Plato system \cite{plato}, TeXMacs is used as a document-oriented frontend whose documents are parsed and verified by the Omega proof assistant.
+%In the Plato system \cite{plato}, TeXMacs is used as a document-oriented frontend whose documents are parsed and verified by a proof assistant.
+
%Also, several proof assistants can export a {\LaTeX} version of their native input language (e.g., Isabelle and Mizar).
%Similarly, most can export documents and libraries as HTML, which provides added-value features like navigation and dynamic display.
%The exported documents do not fully utilize the narrative flexibility of {\LaTeX} or HTML and are not meant for further editing.
@@ -16,8 +20,7 @@
%Therefore, the formalization of a mathematical document $D$ in a proof assistant usually requires the -- expensive -- formalization of the background theory that connects $D$ to the foundation.
%Moreover, the fixed implementation is limited to its input language, which in turn cannot be fully understood by any other system.
-The goal of \mmttex is not to introduce a new format but to use \latex documents as the primary user-written documents.
-That makes it most similar to \sTeX \cite{stex}, where authors add content markup that allows converting a \latex to an OMDoc document.
+The goal of \mmttex is very similar to \sTeX \cite{stex}, where authors add content markup that allows converting a \latex to an OMDoc document.
%Via \latex ML \cite{latexml}, this permits generating \omdoc from the same source code that produces PDF documents.
%\omdoc itself \cite{omdoc} is an XML format that integrates both content and narration markup.
%Both \omdoc and sTeX do not define a reference semantics that would permit verifying the documents.
diff --git a/src/latex-mmt/paper/paper.pdf b/src/latex-mmt/paper/paper.pdf
index 66eac364a2..5a2d119674 100644
Binary files a/src/latex-mmt/paper/paper.pdf and b/src/latex-mmt/paper/paper.pdf differ
diff --git a/src/latex-mmt/paper/paper.tex b/src/latex-mmt/paper/paper.tex
index df26601e05..10c61af1ee 100644
--- a/src/latex-mmt/paper/paper.tex
+++ b/src/latex-mmt/paper/paper.tex
@@ -1,7 +1,7 @@
\documentclass[orivec]{llncs}
\pagestyle{plain}
-\usepackage{url}
+\usepackage{url,xcolor,paralist}
\usepackage{wrapfig}
\usepackage{amsmath,amssymb}
\usepackage{xspace}
@@ -21,7 +21,6 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% local macros and configurations
-\usepackage[show]{ed}
\usepackage{basics}
\usepackage{local}
@@ -40,7 +39,7 @@
\maketitle
\begin{abstract}
-Narrative, presentation-oriented assistant systems for mathematics on the one hand and formal, content-oriented ones on the other hand have so far been developed and used largely independently.
+Narrative, presentation-oriented assistant systems for mathematics such as \latex on the one hand and formal, content-oriented ones such as proof assistants and computer algebra systems on the other hand have so far been developed and used largely independently.
The former excel at communicating mathematical knowledge and the latter at certifying its correctness.
\mmttex aims at combining the advantages of the two paradigms.
diff --git a/src/latex-mmt/paper/paper.tex.mmt b/src/latex-mmt/paper/paper.tex.mmt
index 1503eab439..0a025f25c0 100644
--- a/src/latex-mmt/paper/paper.tex.mmt
+++ b/src/latex-mmt/paper/paper.tex.mmt
@@ -18,7 +18,7 @@ constant inv
# 1 ' prec 60
-theory GroupDefs
+theory Division
: ex:?SFOLEQ
=
include ?Group
diff --git a/src/latex-mmt/paper/system.tex b/src/latex-mmt/paper/system.tex
index d7a2bad678..ee09b37876 100644
--- a/src/latex-mmt/paper/system.tex
+++ b/src/latex-mmt/paper/system.tex
@@ -3,36 +3,42 @@ \subsection{Overview}
MMTTeX consists of two components:
\begin{compactitem}
\item an \mmt plugin \texttt{latex-mmt} that converts \mmt theories to \latex packages,
- \item an \latex package \texttt{mmttex.sty}, which allows for embedding \mmt content.
+ \item a small \latex package \texttt{mmttex.sty} (about $100$ loc with only standard dependencies), which allows for embedding \mmt content.
\end{compactitem}
The two components are tied together in bibtex-style, i.e.,
\begin{compactenum}
\item While running \latex on \texttt{doc.tex}, \texttt{mmttex.sty} produces an additional output file \texttt{doc.tex.mmt}, which is a regular \mmt file.
\texttt{doc.tex.mmt} contains all formal content that was embedded in the \texttt{tex} document.
\item \texttt{latex-mmt} is run on \texttt{doc.tex.mmt} and produces \texttt{doc.tex.mmt.sty}.
- This type-checks the embedded formal content and generates macro definitions for rendering it in the next step.
- \item When running \latex the next time, the package \texttt{doc.tex.mmt.sty} is included at the beginning. If changes were made, this run also changes \texttt{doc.tex.mmt}.
+ This type-checks the embedded formal content and generates macro definitions for rendering it in the following step.
+ \item When running \latex the next time, the package \texttt{doc.tex.mmt.sty} is included at the beginning. Now all embedded formal content is rendered using the macro definitions from the previous step. If the formal content changed, \texttt{doc.tex.mmt} also changes.
\end{compactenum}
\noindent
-Note that \texttt{latex-mmt} only needs to be re-run if the document changed.
-That is important for sharing documents with colleagues or publishers who want to run plain \latex: by simply supplying \texttt{doc.tex.mmt.sty} along with \texttt{doc.tex}, running plain \latex is sufficient to rebuild \texttt{doc.pdf}.
+Note that \texttt{latex-mmt} only needs to be re-run if the formal content of the document changed.
+That is important for sharing documents with colleagues or publishers who want to or can only run plain \latex: by simply supplying \texttt{doc.tex.mmt.sty} along with \texttt{doc.tex}, running plain \latex is sufficient to build \texttt{doc.pdf}.
\subsection{Formal Content in LaTeX Documents}
-\texttt{mmttex.sty} provides presentation-irrelevant and presentation-irrelevant macros for embedding formal content in addition to resp. instead of informal text.
+% Throughout this document, !text! is an abbreviation for \verb|text|
+
+\texttt{mmttex.sty} provides presentation-\emph{irrelevant} and presentation-\emph{relevant} macros for embedding formal content in addition to resp. instead of informal text.
\medskip
\textbf{Presentation-irrelevant macros} only affect \texttt{doc.tex.mmt} and do not produce anything that is visible in the pdf document.
-These macros are usually used to embed the formalization in parallel to the informal text.
-At the lowest level, this is realized by a single macro that takes a string and appends it to \texttt{doc.tex.mmt}.
-On top, we provide a suite of syntactic sugar that mimics the structure of \mmt language.
+These macros can be used to embed a (partial) formalization of the informal text.
+The formalization can occur as a single big chunk, be interspersed with the \latex source akin to parallel markup, or be anything in between.
+Importantly, if split into multiple chunks, one formal chunk may introduce names that are referred to in other formal chunks, and \latex environments are used to build nested scopes for these names.
+
+At the lowest level, this is implemented by a single macro that takes a string and appends it to \texttt{doc.tex.mmt}.
+On top, we provide a suite of syntactic sugar that mimics the structure of the \mmt language.
-As a simple example, we will now define the theory of groups informally and embed its parallel \mmt formalization.
-Because the formalization is invisible in the pdf, we added \lstinline|listings in gray| that show the formalization in exactly the spot where they occur invisibly in the tex file.
-If this is confusing, readers should inspect the source code of this paper at the URL given above.
+As a simple example, we will now define the theory of groups informally and embed its parallel \mmt formalization into this paper.
+Of course, the embedded formalization is invisible in the pdf.
+Therefore, we added \lstinline|listings in gray| that show the presentation-irrelevant macros that occur in the \latex sources of this paper and that embed the formalization.
+If this is confusing, readers may want to inspect the source code of this paper at the URL given above.
-Our example will refer to the theory !SFOLEQ!, which formalizes sorted first-order logic with equality and is defined in the examples archive of \mmt.\footnote{See \url{https://gl.mathhub.info/MMT/examples/blob/master/source/logic/sfol.mmt}.}
+Our example will refer to the theory !SFOLEQ!, which formalizes sorted first-order logic with equality and is defined in the \texttt{examples} archive of \mmt.\footnote{See \url{https://gl.mathhub.info/MMT/examples/blob/master/source/logic/sfol.mmt}.}
To refer to it conveniently, we will import its namespace under the abbreviation !ex!.
\begin{mmttheory}{Group}{ex:?SFOLEQ}
@@ -89,24 +95,25 @@ \subsection{Formal Content in LaTeX Documents}
{tm U --> tm U --> tm U}{[x,y] x*y'}{1 / 2 prec 50}
\end{lstlisting}
-\begin{mmttheory}{GroupDefs}{ex:?SFOLEQ}
+\begin{mmttheory}{Division}{ex:?SFOLEQ}
\mmtinclude{?Group}
\mmtconstant{division}{tm U --> tm U --> tm U}{[x,y] x*y'}{1 / 2 prec 50}
\noindent
-Note that we have not closed the theory yet.
+Note that we have not closed the theory yet, i.e., future formal objects will be processed in the scope of !Division!.
\medskip
\textbf{Presentation-relevant macros} result in changes to the pdf document.
-The most important such macro is one that takes a math formula in \mmt syntax and parses, type-checks, and renders it.
-For this macro, we provide special notation using an active character:
-if we write !"$F$"! instead of $\mathdollar F\mathdollar$, then $F$ is considered to be a math formula in \mmt syntax and processed by \mmt.
-Consider the formula !"forall [x] x / x = e"!. \mmt parses it relative to the current theory, type-checks, and substitutes it with \latex commands that are rendered as "forall [x] x / x = e".
+The most important such macro provided by \texttt{mmttex.sty} is one that takes a math formula in \mmt syntax and parses, type-checks, and renders it.
+For this macro, we provide special syntax that allows using quotes instead of dollars to have formulas processed by \mmt:
+if we write !"$F$"! (including the quotes) instead of $\mathdollar F\mathdollar$, then $F$ is considered to be a math formula in \mmt syntax and processed by \mmt.
+For example, the formula !"forall [x] x / x = e"! is parsed by \mmt relative to the current theory, i.e., !Division!; then \mmt type-checks it and substitutes it with \latex commands. In the previous sentence, the \latex source of the quoted formula is additionally wrapped into a verbatim macro to avoid processing by \mmt; if we remove that wrapper, the quoted formula is rendered into pdf as "forall [x] x / x = e".
-Type checking infers the type of !tm U! of the bound variable !x! and the sort argument of equality.
-These are attached to the formula as tooltips.
+Type checking the above formula infers the type !tm U! of the bound variable !x!.
+This is shown as a tooltip when hovering over the binding location of !x!.
(Tooltip display is supported by many but not all pdf viewers.
-Unfortunately, pdf tooltips are limited to strings so that we cannot show, e.g., MathML even though we could generate it easily.)
+Unfortunately, pdf tooltips are limited to strings so that we cannot show tooltips containing \latex or MathML renderings even though we could generate them easily.)
+Similarly, the sort argument of equality is inferred.
Moreover, every operator carries a hyperlink to the point of its declaration.
%This permits easy type and definition lookup by clicking on symbols.
Currently, these links point to the \mmt server, which is assumed to run locally.
@@ -116,12 +123,13 @@ \subsection{Formal Content in LaTeX Documents}
%This has the additional benefit that the added value is preserved even in the printed version.
\medskip
+\noindent
This is implemented as follows:
\begin{compactenum}
- \item An \mmt formula !"$F$"! simply produces a macro like !\mmt@X! for a running counter !X!.
+ \item An \mmt formula !"$F$"! simply produces a macro call !\mmt@X! for a running counter !X!.
If that macro is undefined, a placeholder is shown and the user is warned that a second compilation is needed.
- Additionally, a definition !mmt@X = $F$! is generated and placed into \texttt{doc.tex.mmt}.
- \item When \texttt{latex-mmt} processes that definition, it additionally generates a macro definition !\newcommand{\mmt@X}{$\ov{F}$}! \texttt{doc.tex.mmt.sty}, where $\ov{F}$ is the intended \latex representation of $F$.
+ Additionally, a definition !mmt@X = $F$! in \mmt syntax is placed into \texttt{doc.tex.mmt}.
+ \item When \texttt{latex-mmt} processes that definition, it additionally generates a macro definition !\newcommand{\mmt@X}{$\ov{F}$}! in \texttt{doc.tex.mmt.sty}, where $\ov{F}$ is the intended \latex representation of $F$.
\item During the next compilation, \mmt@X produces the rendering of $\ov{F}$.
If $F$ did not type-check, additional a \latex error is produced with the error message.
\end{compactenum}
@@ -144,21 +152,22 @@ \subsection{Converting MMT Content To LaTeX}
%In a second step, the background is made available to \latex in the form of \latex packages.
%\mmt already provides a build management infrastructure that allows writing exporters to other formats.
-We run \texttt{latex-mmt} on every theory $T$ that is part of the background knowledge, e.g., !SFOLEQ!, and on those in \texttt{doc.tex.mmt}, resulting in one \latex package (sty file) each.
+We run \texttt{latex-mmt} on every theory $T$ that is part of the background knowledge, e.g., !SFOLEQ!, and on all theories that are part of \texttt{doc.tex.mmt}, resulting in one \latex package (sty file) each.
This package contains one \lstinline|\RequirePackage| macro for every dependency and one \lstinline|\newcommand| macro for every constant declaration.
\texttt{doc.tex.mmt.sty} is automatically included at the beginning of the document and thus brings all necessary generated \latex packages into scope.
The generated \lstinline|\newcommand| macros use (only) the notation of the constant.
-For example, for the constant !operator!, the generated command is essentially !\newcommand{\operator}[2]{#1*#2}!.
-Technically, however, it is much more complex:
+For example, for the constant named !operator! from above, the generated command is essentially !\newcommand{\operator}[2]{#1*#2}!.
+Technically, however, the macro definition is much more complex:
Firstly, instead of !#1*#2!, we produce a a macro definition that generates the right tooltips, hyperreferences, etc.
-Secondly, we use the fully qualified \mmt URI as the \latex macro name to avoid ambiguity when multiple theories define constants of the same local name.
+Secondly, instead of !\operator!, we use the fully qualified \mmt URI as the \latex macro name to avoid ambiguity when multiple theories define constants of the same local name.
-This is an important technical difference between \mmttex and \sTeX \cite{stex}: the latter intentionally generates short human-friendly macro names because they are meant to be called by humans, which requires relatively complex scope management.
+The latter is an important technical difference between \mmttex and \sTeX \cite{stex}: \sTeX intentionally generates short human-friendly macro names because they are meant to be called by humans.
+That requires relatively complex scope management and dynamic loading of macro definitions to avoid ambiguity.
But that is inessential in our case because our macros are called by generated \latex commands (namely those in the definiens of !\mmt@X!).
-But it would be easy to add macros for creating aliases with human-friendly names.
+Nonetheless, it would be easy to add macros to \texttt{mmttex.sty} for creating aliases with human-friendly names.
-The conversion from \mmt to \latex can be easily run in batch mode so that any content available in \mmt form can be easily used as background knowledge in \latex documents.
+The conversion from \mmt to \latex can be easily run in batch mode so that any content available in \mmt can be easily used as background knowledge in \latex documents.
%sTeX uses a more complex mechanism that defines macros only when a theory is included and undefines them when the theory goes out of scope.
%This does not solve the name clash problem and is more brittle when flexibly switching scopes in a document.
diff --git a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/ArchiveBuilder.scala b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/ArchiveBuilder.scala
index b61aa970f3..568e0319d6 100644
--- a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/ArchiveBuilder.scala
+++ b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/ArchiveBuilder.scala
@@ -13,7 +13,7 @@ trait ArchiveBuilder { this: Builder =>
/** tries to find an archive with a given id */
protected def tryArchive(id: String) : Option[LMHHubArchiveEntry] = {
logDebug(s"trying $id as archive")
- val optEntry = mathHub.entries_.collectFirst({
+ val optEntry = mathHub.installedEntries.collectFirst({
case e: LMHHubArchiveEntry if e.id == id => e
})
@@ -50,12 +50,9 @@ trait ArchiveBuilder { this: Builder =>
val tags = entry.tags.map(t => getTagRef("@" + t).getOrElse(return buildFailure(entry.id, s"getTagRef(archive.tag[@$t])")))
// get the description file
- val file = entry.root / entry.archive.properties.getOrElse("description", "desc.html")
- val description = if(entry.root <= file && file.exists()) {
- File.read(file)
- } else { "No description provided" }
+ val description = entry.readLongDescription.getOrElse("No description provided")
- val responsible = entry.archive.properties.getOrElse("responsible", "").split(",").map(_.trim).toList
+ val responsible = entry.properties.getOrElse("responsible", "").split(",").map(_.trim).toList
val narrativeRootPath = entry.archive.narrationBase.toString
val narrativeRoot = getDocument(narrativeRootPath)
@@ -70,7 +67,7 @@ trait ArchiveBuilder { this: Builder =>
Some(IArchive(
ref.parent, ref.id, ref.name,
- getStats(ref.id),
+ getStats(entry.statistics),
ref.title, ref.teaser,
tags,
version,
diff --git a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/Builder.scala b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/Builder.scala
index ed6bf79662..10ee0c0a41 100644
--- a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/Builder.scala
+++ b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/Builder.scala
@@ -1,6 +1,6 @@
package info.kwarc.mmt.mathhub.library.Context.Builders
-import info.kwarc.mmt.api.archives.{Archive, MathHub}
+import info.kwarc.mmt.api.archives.{Archive, MathHub, SimpleStatistics}
import info.kwarc.mmt.api._
import info.kwarc.mmt.api.documents.Document
import info.kwarc.mmt.api.frontend.{Controller, Logger}
@@ -313,4 +313,7 @@ trait Getters { this: Builder =>
trait Statistics { this: Builder =>
// TODO: Build statistics in a cached form
protected def getStats(path: String): Option[List[IStatistic]] = None
+ protected def getStats(stats: Option[SimpleStatistics]): Option[List[IStatistic]] = stats.map {s =>
+ List(IStatistic("any_con", s.constantCount), IStatistic("theo", s.theoryCount))
+ }
}
\ No newline at end of file
diff --git a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/DocumentBuilder.scala b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/DocumentBuilder.scala
index e5e3c75b3f..5a81a513af 100644
--- a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/DocumentBuilder.scala
+++ b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/DocumentBuilder.scala
@@ -23,7 +23,7 @@ trait DocumentBuilder { this: Builder =>
private def makeDocumentRef(path: DPath): Option[IDocumentRef] = {
// if we are the narrationBase of an archive, that is our parent
- val parent = mathHub.entries_.find({
+ val parent = mathHub.installedEntries.find({
case ae: LMHHubArchiveEntry => ae.archive.narrationBase.toString == path.toPath
case _ => false
}) match {
diff --git a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupBuilder.scala b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupBuilder.scala
index 3b7886319d..94cf26c282 100644
--- a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupBuilder.scala
+++ b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupBuilder.scala
@@ -3,7 +3,7 @@ package info.kwarc.mmt.mathhub.library.Context.Builders
import info.kwarc.mmt.api.archives.{LMHHubArchiveEntry, LMHHubGroupEntry}
import info.kwarc.mmt.api.utils.File
import info.kwarc.mmt.mathhub.library.Context.MathHubAPIContext
-import info.kwarc.mmt.mathhub.library.{IGroup, IGroupRef}
+import info.kwarc.mmt.mathhub.library.{IGroup, IGroupRef, IStatistic}
/** a builder for groups */
trait GroupBuilder { this: Builder =>
@@ -12,7 +12,7 @@ trait GroupBuilder { this: Builder =>
protected def tryGroup(id: String) : Option[LMHHubGroupEntry] = {
logDebug(s"trying $id as group")
- val optEntry = mathHub.entries_.collectFirst({
+ val optEntry = mathHub.installedEntries.collectFirst({
case e: LMHHubGroupEntry if e.group == id => e
})
@@ -44,18 +44,14 @@ trait GroupBuilder { this: Builder =>
val ref = getGroupRef(entry.group).getOrElse(return buildFailure(entry.group, "getGroupRef(group.id)"))
// get the description file
- val file = entry.root / entry.properties.getOrElse("description", "desc.html")
- val description = if(entry.root <= file && file.exists()) {
- File.read(file)
- } else { "No description provided" }
-
+ val description = entry.readLongDescription.getOrElse("No description provided")
val responsible = entry.properties.getOrElse("responsible", "").split(",").map(_.trim).toList
- val archives = mathHub.entries_.collect({case archive: LMHHubArchiveEntry if archive.group == ref.id => archive.id })
+ val archives = entry.members.collect({case archive: LMHHubArchiveEntry => archive.id })
Some(IGroup(
ref.id, ref.name,
- getStats(ref.id),
+ getStats(entry.statistics),
ref.title, ref.teaser,
description,
diff --git a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupsBuilder.scala b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupsBuilder.scala
index 7b7a45b6f0..83c4fd3d54 100644
--- a/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupsBuilder.scala
+++ b/src/mathhub-mmt/src/info/kwarc/mmt/mathhub/library/Context/Builders/GroupsBuilder.scala
@@ -5,7 +5,7 @@ import info.kwarc.mmt.mathhub.library.IGroupRef
trait GroupsBuilder { this: Builder =>
def getGroups() : List[IGroupRef] = {
val mh = mathHub
- mh.entries_
+ mh.installedEntries
.collect({case ae: mh.MathHubGroupEntry => ae.group }).distinct
.flatMap(e => getGroupRef(e))
}
diff --git a/src/mmt-api/resources/latex/unicode-latex-map b/src/mmt-api/resources/latex/unicode-latex-map
index eddefaf4ce..25f53ef6a1 100644
--- a/src/mmt-api/resources/latex/unicode-latex-map
+++ b/src/mmt-api/resources/latex/unicode-latex-map
@@ -108,8 +108,8 @@ jbigoplus|⨁
jbigotimes|⨂
jsubset|⊂
jsupset|⊃
-jsq|⊆
-jsubseteq|⊆
+jsq|⊆
+jsubseteq|⊆
jsupseteq|⊇
jnsubset|⊄
jnsupset|⊅
@@ -148,6 +148,8 @@ jtriangleright|▷
jtriangleleft|◁
jblacktriangleright|▶
jblacktriangleleft|◀
+jsmblksquare|▪
+jsmwhtsquare|▫
jder|⊦
jrightassert|⊦
jvdash|⊢
diff --git a/src/mmt-api/resources/mmtrc b/src/mmt-api/resources/mmtrc
index 760cb5aeac..e8b6e3358b 100644
--- a/src/mmt-api/resources/mmtrc
+++ b/src/mmt-api/resources/mmtrc
@@ -43,6 +43,6 @@ models http://models.latin.omdoc.org/
// entries for MitM systems, moved here from mitm/config.default.json
#mitm
-GAP localhost 26133
+GAP field.informatik.uni-erlangen.de 26133
Sage localhost 26136
-Singular localhost 26135
\ No newline at end of file
+Singular field.informatik.uni-erlangen.de 26135
\ No newline at end of file
diff --git a/src/mmt-api/resources/versioning/system.txt b/src/mmt-api/resources/versioning/system.txt
index 946789e619..aac58983e6 100644
--- a/src/mmt-api/resources/versioning/system.txt
+++ b/src/mmt-api/resources/versioning/system.txt
@@ -1 +1 @@
-16.0.0
+17.0.0
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/Archive.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/Archive.scala
index 1a570c9b8d..7f2faf9ce7 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/Archive.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/Archive.scala
@@ -3,6 +3,7 @@ package info.kwarc.mmt.api.archives
import info.kwarc.mmt.api._
import info.kwarc.mmt.api.backend._
import info.kwarc.mmt.api.frontend._
+import info.kwarc.mmt.api.modules.Theory
import info.kwarc.mmt.api.objects._
import info.kwarc.mmt.api.ontology._
import info.kwarc.mmt.api.utils._
@@ -136,6 +137,16 @@ class Archive(val root: File, val properties: mutable.Map[String, String], val r
else None
}
+ /** Returns (#Theories, #Constants) */
+ def stats(implicit controller: Controller): SimpleStatistics = {
+ // val arch = controller.backend.getArchive(a).get
+ val ths = allContent.flatMap{mp =>
+ Try(controller.get(mp).asInstanceOf[Theory]).toOption
+ }
+ val const = ths.flatMap(_.getConstants)
+ SimpleStatistics(ths.length,const.length)
+ }
+
/**
* Kinda hacky; can be used to get all Modules residing in this archive somewhat quickly
* TODO do properly
@@ -281,3 +292,13 @@ object Archive {
/** returns a trivial TraverseMode */
def traverseIf(e: String): TraverseMode = TraverseMode(extensionIs(e), _ => true, parallel = false)
}
+
+/** very simple statistics implementation */
+case class SimpleStatistics(theoryCount: Int, constantCount: Int) {
+ def + (other: SimpleStatistics): SimpleStatistics = {
+ SimpleStatistics(theoryCount + other.theoryCount, constantCount + other.constantCount)
+ }
+}
+object SimpleStatistics {
+ val empty: SimpleStatistics = SimpleStatistics(0, 0)
+}
\ No newline at end of file
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildDependencies.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildDependencies.scala
index 2f871b72ad..739e0b3c4e 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildDependencies.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildDependencies.scala
@@ -29,6 +29,7 @@ sealed abstract class BuildDependency extends Dependency {
*/
case class FileBuildDependency(key: String, archive: Archive, inPath: FilePath) extends BuildDependency {
def toJString: String = inPath.toString + " (" + key + ")"
+ // ToDo: The controller isn't actually used here, I think it can go. -- jbetzend
def getErrorFile(controller: Controller): File = (archive / errors / key / inPath).addExtension("err")
}
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildTarget.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildTarget.scala
index a1aaeb5cb3..5d811a61e5 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildTarget.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/BuildTarget.scala
@@ -478,20 +478,36 @@ abstract class TraversingBuildTarget extends BuildTarget {
/** auxiliary method of runBuildTaskIfNeeded: implements the semantics of Update to determine whether a task has to be built */
// TODO specify the semantics of Update
- private def rebuildNeeded(deps: Set[Dependency], bt: BuildTask, level: Level): Boolean = {
- val errorFile = bt.asDependency.getErrorFile(controller)
- val errs = hadErrors(errorFile, level)
- val mod = modified(bt.inFile, errorFile)
- level <= Level.Force || mod || errs ||
- deps.exists {
- case bd: BuildDependency =>
- val errFile = bd.getErrorFile(controller)
- modified(errFile, errorFile)
- case PhysicalDependency(fFile) => modified(fFile, errorFile)
- case _ => false // for now
- } || bt.isDir && bt.children.getOrElse(Nil).exists { bf =>
+ private def rebuildNeeded(deps: Set[Dependency], bt: BuildTask, level: Level): Boolean =
+ {
+ val errorFile : File = bt.asDependency.getErrorFile(controller)
+
+ lazy val forced : Boolean = level <= Level.Force
+ lazy val outex : Boolean = {
+ // usually, we build outfiles, that don't exist.
+ // However, for .deps files, we don't need to.
+ val ext = bt.outFile.getExtension
+ !bt.outFile.exists() && (if (ext.isDefined) { ext.get != "deps" } else true)
+ }
+ lazy val modded : Boolean = modified(bt.inFile, errorFile)
+ lazy val errors : Boolean = hadErrors(errorFile, level)
+
+ def singleDepModded(dep : Dependency) : Boolean = dep match {
+ case bd: BuildDependency =>
+ val errFile = bd.getErrorFile(controller)
+ modified(errFile, errorFile)
+
+ case PhysicalDependency(fFile) => modified(fFile, errorFile)
+ case _ => false // for now
+ }
+
+ lazy val depsModded : Boolean = deps.exists(singleDepModded)
+
+ lazy val isDir : Boolean = bt.isDir && bt.children.getOrElse(Nil).exists { bf =>
modified(bf.asDependency.getErrorFile(controller), errorFile)
}
+
+ forced || outex || modded || errors || depsModded || isDir
}
/** auxiliary method of runBuildTaskIfNeeded */
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/LMHHub.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/LMHHub.scala
index ac662838f6..47c82b6687 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/LMHHub.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/LMHHub.scala
@@ -15,16 +15,16 @@ abstract class LMHHub extends Logger {
protected def report = controller.report
/** find all locally installed entries */
- protected def entries_ : List[LMHHubEntry]
+ def installedEntries : List[LMHHubEntry]
/** find all repositories given a specification */
- def entries(spec: String*): List[LMHHubEntry] = if (spec.nonEmpty) entries_.filter(e => spec.exists(e.matches)) else entries_
+ def entries(spec: String*): List[LMHHubEntry] = if (spec.nonEmpty) installedEntries.filter(e => spec.exists(e.matches)) else installedEntries
/** finds all archive entries available locally */
- def archiveEntries: List[LMHHubArchiveEntry] = entries_.collect({case a: LMHHubArchiveEntry => a})
+ def archiveEntries: List[LMHHubArchiveEntry] = installedEntries.collect({case a: LMHHubArchiveEntry => a})
/** finds all group entries available locally */
- def groupEntries: List[LMHHubGroupEntry] = entries_.collect({case g: LMHHubGroupEntry => g})
+ def groupEntries: List[LMHHubGroupEntry] = installedEntries.collect({case g: LMHHubGroupEntry => g})
/** finds all directory entries available locally */
- def dirEntries: List[LMHHubDirectoryEntry] = entries_.collect({case d: LMHHubDirectoryEntry => d})
+ def dirEntries: List[LMHHubDirectoryEntry] = installedEntries.collect({case d: LMHHubDirectoryEntry => d})
/** checks if a group exists remotely */
def hasGroup(name: String): Boolean
@@ -127,35 +127,52 @@ trait LMHHubEntry extends Logger {
val logPrefix: String = "lmh"
def report: Report = controller.report
- /** the name of the group of this entry */
- lazy val group: String = id.split("/").toList.headOption.getOrElse("")
- /** the name of this archive */
- lazy val name: String = id.split("/").toList.lastOption.getOrElse("")
-
- // Things to be implemented
+ /** the local root of this archive entry */
+ val root: File
/** loads the LMHHubEntry or throw an error if it is invalid */
def load(): Unit
+
+ /** the properties of this entry, if any */
+ def properties: Map[String, String]
+
+ /** reads the long description */
+ def readLongDescription: Option[String] = {
+ val filename = properties.getOrElse("description", "desc.html")
+ List(root, root / "META-INF").map(_ / filename).find(_.exists).map(File.read)
+ }
+
+
/** the id of this archive entry */
val id: String
- /** the local root of this archive entry */
- val root: File
+ /** the name of the group of this entry */
+ lazy val group: String = id.split("/").dropRight(1).mkString("/")
+ /** the name of this archive */
+ lazy val name: String = id.split("/").lastOption.getOrElse("")
+
/** check if this archive matches a given spec */
def matches(spec : String): Boolean = LMHHub.matchesComponents(spec, id)
+
+
+ // version control things
+
/** download information about archive versions from the remote */
def fetch: Boolean
/** push the newest version of this archive to the remote */
def push: Boolean
/** pull the newest version of this archive from the remote */
def pull: Boolean
+
/** reset the remote url of this archive to a given one */
def setRemote(remote : String) : Boolean
/** fix the remote url of the archive */
def fixRemote: Boolean = setRemote(hub.remoteURL(id))
+
/** returns the physical version (a.k.a commit hash) of an installed archive */
def physicalVersion: Option[String]
/** returns the logical version (a.k.a branch) of an installed archive */
def logicalVersion: Option[String]
+
/** gets the version of an installed archive, a.k.a. the branch of the git commit hash */
def version: Option[String] = logicalVersion.map(Some(_)).getOrElse(physicalVersion)
}
@@ -164,16 +181,21 @@ trait LMHHubEntry extends Logger {
trait LMHHubDirectoryEntry extends LMHHubEntry {
def load(): Unit = {}
- lazy val id: String = (root / "..").canonical.name + "/" + root.canonical.name
+ def properties: Map[String, String] = Map()
+ def statistics: Option[SimpleStatistics] = None
+
+ // Read the properties from the manifest
+ lazy val id: String = {
+ properties.getOrElse("id", {
+ val canon = s"${(root / "..").canonical.name}/${root.canonical.name}"
+ log(s"Unable to read id from manifest, falling back to $canon")
+ canon
+ })
+ }
}
/** represents a single archive inside an [[LMHHub]] that is installed on disk */
trait LMHHubArchiveEntry extends LMHHubDirectoryEntry {
- /** returns the [[Archive]] instance belonging to this local ArchiveHub entry */
- def archive : Archive = {
- load()
- controller.backend.getArchive(root).get
- }
/** loads this archive into the controller (if not done already) */
override def load() {
controller.backend.getArchive(root).getOrElse {
@@ -185,12 +207,21 @@ trait LMHHubArchiveEntry extends LMHHubDirectoryEntry {
}
}
- /** get the id of this archive */
- override lazy val id: String = archive.id
+ /** returns the [[Archive]] instance belonging to this local ArchiveHub entry */
+ lazy val archive : Archive = {
+ load()
+ controller.backend.getArchive(root).get
+ }
+
+ /** reads the archive props */
+ override def properties: Map[String, String] = archive.properties.toMap
+
+ /** reads archive statistics */
+ override def statistics: Option[SimpleStatistics] = Some(archive.stats(controller))
/** the list of dependencies of this archive */
def dependencies: List[String] = {
- val string = archive.properties.getOrElse("dependencies", "").replace(",", " ")
+ val string = properties.getOrElse("dependencies", "").replace(",", " ")
// check if we have a meta-inf repository, and if yes install it
val deps = (if(hub.hasGroup(group)) List(group + "/meta-inf") else Nil) ::: stringToList(string)
deps.distinct
@@ -199,7 +230,7 @@ trait LMHHubArchiveEntry extends LMHHubDirectoryEntry {
// TODO: Change meta-inf property used here
/** the list of tags associated with this archive */
def tags: List[String] = {
- val mfTags = archive.properties.get("tags").map(stringToList(_, ",")).getOrElse(Nil)
+ val mfTags = properties.get("tags").map(stringToList(_, ",")).getOrElse(Nil)
(List("group/"+group) ::: mfTags).map(_.toLowerCase)
}
}
@@ -209,7 +240,7 @@ case class NotLoadableArchiveEntry(root: File) extends api.Error("not a loadable
/** represents a group archive inside an [[LMHHub]] that is installed on disk */
trait LMHHubGroupEntry extends LMHHubDirectoryEntry {
- private var groupManifest: mutable.Map[String, String] = null
+ private var groupManifest: Map[String, String] = null
override def load(): Unit = {
val manifest = {
@@ -217,20 +248,32 @@ trait LMHHubGroupEntry extends LMHHubDirectoryEntry {
.find(_.exists).getOrElse(throw NotLoadableGroupEntry(root))
}
try {
- groupManifest = File.readProperties(manifest)
+ groupManifest = File.readProperties(manifest).toMap
} catch {
case e: Exception => throw NotLoadableGroupEntry(root).setCausedBy(e)
}
}
+ /** finds all LMH Hub entries that are a member of this group */
+ def members: List[LMHHubEntry] = {
+ hub.installedEntries.filter(c => c.group == group && c.id != id)
+ }
+
+ /** collects statistics in this archive */
+ override def statistics: Option[SimpleStatistics] = {
+ Some(
+ members
+ .collect({case de: LMHHubDirectoryEntry => de.statistics})
+ .collect({ case Some(s: SimpleStatistics) => s})
+ .fold(SimpleStatistics.empty)(_ + _)
+ )
+ }
+
/** the group properties */
- def properties : mutable.Map[String, String] = {
+ override def properties : Map[String, String] = {
load()
groupManifest
}
-
- // TODO: Do we want to read the entry from the folder
- // override lazy val id: String = properties.getOrElse("id", (root / "..").name) + "/" + "meta-inf"
}
/** Error that is thrown when an archive on disk is not an actual archive */
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/MathHub.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/MathHub.scala
index 7210e93e67..69caf9eee4 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/MathHub.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/MathHub.scala
@@ -141,7 +141,7 @@ class MathHub(val controller: Controller, var local: File, var remote: URI, var
}
/** find all the archives known to the controller */
- def entries_ : List[MathHubEntry] = {
+ def installedEntries : List[MathHubEntry] = {
val folders = new ListBuffer[File]
debug("scanning for archives")
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/package.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/package.scala
index a75039374a..dbaa84c157 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/archives/package.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/archives/package.scala
@@ -12,13 +12,14 @@ import archives._
*
* The list of currently open archives (from which MMT will load content if needed) is maintained by the [[backend.Backend]].
*/
-package object archives {
- val source = RedirectableDimension("source")
- val content = RedirectableDimension("content")
- val narration = RedirectableDimension("narration")
- val relational = RedirectableDimension("relational")
- val notational = RedirectableDimension("notations")
- val errors = RedirectableDimension("errors")
- val export = RedirectableDimension("export")
- val flat = RedirectableDimension("flat")
+package object archives
+{
+ val source : ArchiveDimension = RedirectableDimension("source")
+ val content : ArchiveDimension = RedirectableDimension("content")
+ val narration : ArchiveDimension = RedirectableDimension("narration")
+ val relational : ArchiveDimension = RedirectableDimension("relational")
+ val notational : ArchiveDimension = RedirectableDimension("notations")
+ val errors : ArchiveDimension = RedirectableDimension("errors")
+ val export : ArchiveDimension = RedirectableDimension("export")
+ val flat : ArchiveDimension = RedirectableDimension("flat")
}
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/objects/AnonymousModules.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/objects/AnonymousModules.scala
index a7bb8c8bcf..ff2aa860c7 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/objects/AnonymousModules.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/objects/AnonymousModules.scala
@@ -11,6 +11,7 @@ import utils._
trait AnonymousBody extends ElementContainer[OML] with DefaultLookup[OML] {
val decls: List[OML]
def getDeclarations = decls
+ def toSubstitution : Substitution = decls.map{case OML(name,_,df,_,_) => Sub(name,df.get)}
def toTerm: Term
override def toString = getDeclarations.mkString("{", ", ", "}")
}
@@ -41,6 +42,14 @@ class AnonymousTheory(val mt: Option[MPath], val decls: List[OML]) extends Anony
val m = mt.map(_.toString).getOrElse("")
m + super.toString
}
+ def canEqual(a: Any) = a.isInstanceOf[DiagramNode]
+ override def equals(that: Any): Boolean =
+ that match {
+ case that: AnonymousTheory => (that.mt == this.mt) && (that.decls == this.decls)
+ case _ => false
+ }
+ override def hashCode: Int = { mt.hashCode() + decls.hashCode() }
+
}
/** bridges between [[AnonymousTheory]] and [[Term]] */
@@ -64,6 +73,7 @@ object AnonymousTheoryCombinator {
class AnonymousMorphism(val decls: List[OML]) extends AnonymousBody {
def toTerm = AnonymousMorphismCombinator(decls)
def add(d: OML) = new AnonymousMorphism(decls ::: List(d))
+
}
/** bridges between [[AnonymousMorphism]] and [[Term]] */
@@ -91,6 +101,13 @@ sealed abstract class DiagramElement {
case class DiagramNode(label: LocalName, theory: AnonymousTheory) extends DiagramElement {
def toTerm = OML(label, Some(TheoryType(Nil)), Some(theory.toTerm))
override def toString = s"$label:THEY=$theory"
+ def canEqual(a: Any) = a.isInstanceOf[DiagramNode]
+ override def equals(that: Any): Boolean =
+ that match {
+ case that: DiagramNode => (that.label == this.label) && (that.theory == this.theory)
+ case _ => false
+ }
+ override def hashCode: Int = { label.hashCode() + theory.hashCode() }
}
/** an arrow in an [[AnonymousDiagram]]
* @param label the label of this arrow
@@ -139,22 +156,47 @@ case class AnonymousDiagram(val nodes: List[DiagramNode], val arrows: List[Diagr
def getAllArrowsTo(label : LocalName) : List[DiagramArrow] = {
arrows.filter(a => a.to == label)
}
- def viewOf(from:DiagramNode,to:DiagramNode): List[OML] = {
- /* Finding the rename arrows */
- val distTo = getDistArrowsTo(to.label)
- val rens: List[OML] = distTo.filter(_.label.toString.contains("rename")).flatMap(a => a.morphism.decls)
- val new_decls = from.theory.decls.map { curr =>
- var curr_ren = rens.find(oml => (oml.name == curr.name))
- var new_decl = curr
- while (curr_ren != None) {
- var df = curr_ren.get.df // TODO: Do we want to have getOrElse here?
- new_decl = df.get.asInstanceOf[OML]
- curr_ren = rens.find(oml => (oml.name == new_decl.name))
- }
- new_decl
+ /* Finding the path between two nodes using their labels */
+ def path(sourceLabel : LocalName, targetLabel : LocalName) : List[DiagramArrow] ={
+ arrows.find(a => a.to == targetLabel) match {
+ case None => Nil
+ case Some(a) =>
+ a :: (if (a.label == sourceLabel) Nil else path(sourceLabel,a.label))
}
- new_decls
}
+
+ /* A function to compose two substitutions
+ * The function assumes that assign1 has no duplicate assignments for the same symbol.
+ */
+ def compose(assign1 : List[OML],assign2 : List[OML]): List[OML] = {
+ val new_assigns : List[OML] = assign1.map{ curr : OML =>
+ var temp = curr.name
+ val it = assign2.iterator
+ while(it.hasNext){
+ val n = it.next()
+ if(temp == n.name){
+ temp = n.df.asInstanceOf[OML].name
+ }
+ }
+ val new_decl = new OML(temp,None,None,None)
+ new OML(curr.name,curr.tp,Some(new_decl),curr.nt,curr.featureOpt)
+ }
+ new_assigns
+ }
+
+ /* Finding the views
+ * - each view is a list of assignments of terms to constants (TODO: for now we only consider constant-to-constant assignments)
+ * - find the path (list of views) from the source to the target
+ * - calculate the flat list of assignments in these views (order matters for cases like [a |-> b, b |-> c])
+ * An assignment name = definition is represented as an OML(name, type, definition, notationOpt, featureOpt)
+ * */
+ def viewOf(source : DiagramNode, target : DiagramNode): List[OML] = {
+
+ val arrows: List[DiagramArrow] = path(source.label, target.label)
+ val assignments: List[OML] = arrows.flatMap(a => a.morphism.decls)
+ compose(source.theory.decls,assignments)
+ }
+
def getDistArrowWithNodes: Option[(DiagramNode,DiagramNode,DiagramArrow)] = getDistArrow flatMap {a => getArrowWithNodes(a.label)}
def getElements = nodes:::arrows
/** replaces every label l with r(l) */
@@ -204,7 +246,6 @@ object AnonymousDiagramCombinator {
Some(ad)
case _ => None
}
-
}
object OMLList {
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/parser/StructureParser.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/parser/StructureParser.scala
index 4d1d880cd4..7614b50cfb 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/parser/StructureParser.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/parser/StructureParser.scala
@@ -195,11 +195,15 @@ class KeywordBasedParser(objectParser: ObjectParser) extends Parser(objectParser
case IsDoc(dp) =>
val doc = controller.globalLookup.getAs(classOf[Document],dp)
readInDocument(doc)(state)
- (doc.getDeclarations.last,state)
+ (doc.getDeclarations.lastOption.getOrElse {
+ throw ParseError(dp + " is empty: " + state)
+ },state)
case IsMod(mp, rd) =>
val mod = controller.globalLookup.getAs(classOf[ModuleOrLink],mp)
readInModule(mod, mod.getInnerContext, new Features(Nil,Nil))(state)
- (mod.getDeclarations.last, state)
+ (mod.getDeclarations.lastOption.getOrElse {
+ throw ParseError(mp + " is empty: " + state)
+ }, state)
}
}
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/symbols/DerivedDeclaration.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/symbols/DerivedDeclaration.scala
index 5ad7e9fb6f..87c3031b53 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/symbols/DerivedDeclaration.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/symbols/DerivedDeclaration.scala
@@ -694,3 +694,18 @@ class BoundTheoryParameters(id : String, pi : GlobalName, lambda : GlobalName, a
// def modules(d: DerivedDeclaration): List[Module] = Nil
def check(d: DerivedDeclaration)(implicit env: ExtendedCheckingEnvironment) {}
}
+
+object StructuralFeatureUtil {
+ def externalDeclarationsToElaboration(decls: List[Constant]) = {
+ new Elaboration {
+ val elabDecls = decls
+ def domain = elabDecls map {d => d.name}
+ def getO(n: LocalName) = {
+ elabDecls.find(_.name == n)
+ }
+ }
+ }
+ def singleExternalDeclaration(d: Constant) = {
+ externalDeclarationsToElaboration(List(d))
+ }
+}
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/web/JSONBasedGraphServer.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/web/JSONBasedGraphServer.scala
index 969a3baec0..470cb37781 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/web/JSONBasedGraphServer.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/web/JSONBasedGraphServer.scala
@@ -12,17 +12,18 @@ import info.kwarc.mmt.api.presentation.{HTMLPresenter, MMTDocExporter, MathMLPre
import info.kwarc.mmt.api.symbols._
import info.kwarc.mmt.api.utils._
+
import scala.util.Try
/**
* Created by jazzpirate on 07.06.17.
*/
-class JSONBasedGraphServer extends ServerExtension("jgraph") {
- override val logPrefix = "jgraph"
- private case class CatchError(s : String) extends Throwable
+
+class DirectGraphBuilder extends Extension {
+
+ private case class CatchError(s: String) extends Throwable
override def start(args: List[String]) {
- controller.extman.addExtension(new JGraphSideBar)
controller.extman.addExtension(new JDocgraph)
controller.extman.addExtension(new JThgraph)
controller.extman.addExtension(new JPgraph)
@@ -31,27 +32,44 @@ class JSONBasedGraphServer extends ServerExtension("jgraph") {
super.start(args)
}
- lazy val sidebar = controller.extman.get(classOf[JGraphSideBar]).head
+ def apply(uri: String, key: String, sem: String = "none", comp: String = "default solver"): JSON = {
+ val exp = controller.extman.getOrAddExtension(classOf[JGraphExporter], key).getOrElse {
+ throw CatchError(s"exporter $key not available")
+ }
+ log("Computing " + key + " for " + uri + "... ")
+ val ret = exp.buildGraph(uri)
+ log("Done")
+ ret
+ }
+}
+
+class JSONBasedGraphServer extends ServerExtension("jgraph") {
+ override val logPrefix = "jgraph"
+ private case class CatchError(s : String) extends Throwable
- def apply(request: ServerRequest): ServerResponse = {
+ override def start(args: List[String]) {
+ controller.extman.addExtension(new JGraphSideBar)
+ controller.extman.addExtension(new DirectGraphBuilder)
+ }
+
+ lazy val sidebar = controller.extman.get(classOf[JGraphSideBar]).head
+ lazy val buil = controller.extman.get(classOf[DirectGraphBuilder]).head
+
+ def apply(request:ServerRequest): ServerResponse = {
log("Paths: " + request.pathForExtension)
log("Query: " + request.query)
log("Path: " + request.parsedQuery("uri"))
if (request.pathForExtension.headOption == Some("menu")) {
val id = request.parsedQuery("id").getOrElse("top")
- log("Returing menu for " + id)
+ log("Returning menu for " + id)
if (id == "full") ServerResponse.fromJSON(sidebar.getJSON("top",true))
else ServerResponse.fromJSON(sidebar.getJSON(id))
} else if (request.pathForExtension.headOption == Some("json")) {
val uri = request.parsedQuery("uri").getOrElse(return ServerResponse.errorResponse(GetError("Not a URI"), "json"))
val key = request.parsedQuery("key").getOrElse("pgraph")
- val exp = controller.extman.getOrAddExtension(classOf[JGraphExporter], key).getOrElse {
- throw CatchError(s"exporter $key not available")
- }
- log("Computing " + key + " for " + uri + "... ")
- val ret = ServerResponse.fromJSON(exp.buildGraph(uri))
- log("Done")
+ val graph = buil(uri, key)
+ val ret = ServerResponse.fromJSON(graph)
ret
} else ServerResponse.errorResponse("Invalid path", "json")
}
@@ -155,7 +173,6 @@ abstract class SimpleJGraphExporter(key : String) extends JGraphExporter(key) {
override def logPrefix: String = key
val builder : JGraphBuilder
val selector : JGraphSelector
-
def buildGraph(s : String) : JSON = {
val (ths,vs) = selector.select(s)(controller)
log("building...")
@@ -163,9 +180,10 @@ abstract class SimpleJGraphExporter(key : String) extends JGraphExporter(key) {
log("Done.")
res
}
-
}
+
+
class JDocgraph extends SimpleJGraphExporter("docgraph"){
val builder = GraphBuilder.PlainBuilder
val selector = new JGraphSelector {
@@ -248,6 +266,15 @@ class JPgraph extends SimpleJGraphExporter("pgraph") {
log("Done.")
ret
}
+ /** private def allattacks = {
+ log("Loading attacks...")
+ val ret = (controller.depstore.getInds(IsAttack) collect {
+ case mp : MPath => mp
+ }).toList
+ log("Done.")
+ ret
+ }
+ */
}
class JArchiveGraph extends SimpleJGraphExporter("archivegraph") {
val builder = GraphBuilder.AlignmentBuilder(log(_,None))
@@ -293,7 +320,10 @@ class JMPDGraph extends SimpleJGraphExporter("mpd") {
c.rl.get match {
case "Law" => "model"
case "BoundaryCondition" => "boundarycondition"
- case _ => "theory"}
+ case _ => "theory"
+ case "Accepted" => "acceptedtheory"
+ case "Rejected" => "rejectedtheory"
+ case "Undecided" => "undecidedtheory"}
}).get
ostyle match {
diff --git a/src/mmt-api/src/main/info/kwarc/mmt/api/web/REPLServer.scala b/src/mmt-api/src/main/info/kwarc/mmt/api/web/REPLServer.scala
index cdbc207834..dc3ea04058 100644
--- a/src/mmt-api/src/main/info/kwarc/mmt/api/web/REPLServer.scala
+++ b/src/mmt-api/src/main/info/kwarc/mmt/api/web/REPLServer.scala
@@ -27,6 +27,7 @@ class REPLSession(val doc: Document, val id: String, interpreter: TwoStepInterpr
val se = interpreter(ps)(errorCont)
se match {
case r: MRef => currentScope = IsMod(r.target, LocalName.empty)
+ case Include(_) =>
case m: ModuleOrLink => currentScope = IsMod(m.modulePath, LocalName.empty)
case nm: NestedModule => currentScope = IsMod(nm.module.path, LocalName.empty)
case _ =>
diff --git a/src/mmt-coq/src/info/kwarc/mmt/coq/Importer.scala b/src/mmt-coq/src/info/kwarc/mmt/coq/Importer.scala
index 95ef3eb652..e3be82d240 100644
--- a/src/mmt-coq/src/info/kwarc/mmt/coq/Importer.scala
+++ b/src/mmt-coq/src/info/kwarc/mmt/coq/Importer.scala
@@ -349,6 +349,8 @@ class Importer extends archives.Importer {
case =>
// TODO something
Nil
+ case {_*} =>
+ Nil
case _ : scala.xml.Comment => Nil
case sec @ =>
val uri = URI((sec\"@uri").mkString)
diff --git a/src/mmt-coq/src/info/kwarc/mmt/coq/Syntax.scala b/src/mmt-coq/src/info/kwarc/mmt/coq/Syntax.scala
index 08e25830b8..8d987d7111 100644
--- a/src/mmt-coq/src/info/kwarc/mmt/coq/Syntax.scala
+++ b/src/mmt-coq/src/info/kwarc/mmt/coq/Syntax.scala
@@ -83,7 +83,15 @@ object Coq {
val parent = current.asInstanceOf[Theory].path
state.controller.library.getImplicit(mp,parent) match {
case Some(_) =>
- case None => state.controller.add(PlainInclude(mp,parent),AtBegin)
+ case None => try {
+ state.controller.add(PlainInclude(mp,parent),AtBegin)
+ }
+ catch {
+ case e:ExtensionError =>
+ case e =>
+ println(e.getClass)
+ ???
+ }
}
}
def toGlobalName(uri : URI)(implicit state : TranslationState) : GlobalName = Try(coqtoomdoc(uri)(state.controller)).toOption match {
diff --git a/src/mmt-isabelle/README.md b/src/mmt-isabelle/README.md
index 58b3fa6db0..55d486ebf8 100644
--- a/src/mmt-isabelle/README.md
+++ b/src/mmt-isabelle/README.md
@@ -3,24 +3,27 @@ Isabelle/MMT
## Requirements
-Until the next official release after Isabelle2018 (presumably Isabelle2019
-in June 2019), Isabelle/MMT requires a suitable development snapshot from
-https://isabelle.sketis.net/devel/release_snapshot or a repository clone
-from http://isabelle.in.tum.de/repos/isabelle -- see also its
-`README_REPOSITORY` file with the all-important **Quick start in 30min**.
-
-In particular, the following versions from Nov-2018 should fit together:
-
- * Isabelle/1bee990d443c from https://isabelle.in.tum.de/repos/isabelle
- * AFP/1566e3fc3118 from https://bitbucket.org/isa-afp/afp-devel
- * MMT/55f297239a45 from https://github.com/UniFormal/MMT/commits/devel
- * MathHub/MMT/urtheories/efe7963422a3 from
+Isabelle/MMT requires a version of Isabelle that fits precisely to it. For the
+current stable release that is Isabelle2019 (June 2019). For intermediate
+repository versions, a suitable Isabelle development snapshot is required, e.g.
+from https://isabelle.sketis.net/devel/release_snapshot or a repository clone
+from https://isabelle.sketis.net/repos/isabelle -- see also its
+`README_REPOSITORY` file within the Isabelle repository with the all-important
+**Quick start in 30min**.
+
+In particular, the following versions from May-2019 should fit together:
+
+ * Isabelle/805250bb7363 from https://isabelle.sketis.net/repos/isabelle-release
+ * AFP/2170a6647f04 from https://isabelle.sketis.net/repos/afp-devel
+ * MMT/da1d91942801 from https://github.com/UniFormal/MMT/commits/devel
+ * MathHub/MMT/urtheories/01102b90e8bd from
https://gl.mathhub.info/MMT/urtheories/commits/devel
-The corresponding OMDoc content is available here:
+The corresponding OMDoc content is available here (commit messages refer to the
+underlying versions of Isabelle + AFP):
- * https://gl.mathhub.info/Isabelle/Distribution (version 3faa7f9c7742)
- * https://gl.mathhub.info/Isabelle/AFP (version 30a0dc372eaa)
+ * https://gl.mathhub.info/Isabelle/Distribution
+ * https://gl.mathhub.info/Isabelle/AFP
## Setup
@@ -52,8 +55,7 @@ Here are some example invocations of the main command-line tools:
* importer:
isabelle mmt_import -B ZF
- isabelle mmt_import HOL
- isabelle mmt_import -B HOL-Analysis
+ isabelle mmt_import -g main
isabelle mmt_import -o record_proofs=2 -B HOL-Proofs
* HTTP server to browse the results:
@@ -82,7 +84,7 @@ Recall that Isabelle consists of two processes:
Big examples require generous heap space for both, typically 30 GB. Note
that both platforms have a discontinuity when switching from short 32-bit
-pointers to full 64-bit ones: *4 GB* for Poly/ML and *32 GB* for Java. Going
+pointers to full 64-bit ones: *16 GB* for Poly/ML and *32 GB* for Java. Going
beyond that doubles the baseline memory requirements.
The subsequent setup works well for hardware with 64 GB of main memory:
@@ -100,7 +102,6 @@ The subsequent setup works well for hardware with 64 GB of main memory:
Examples:
isabelle mmt_import -a -X doc -X no_doc
- isabelle mmt_import -d '$AFP' -B HOL-Analysis -X doc -X no_doc -X slow
isabelle mmt_import -d '$AFP' -a -X doc -X no_doc -X slow
isabelle mmt_import -d '$AFP' -a -X doc -X no_doc -X very_slow
@@ -123,7 +124,7 @@ serves as catch-all pattern.
Since all sessions in AFP are guaranteed to belong to the chapter `AFP`, the
following works for Isabelle + AFP as one big import process:
- isabelle mmt_import -d '$AFP' -A content/MathHub -C AFP=AFP -C _=Distribution -a -X doc -X no_doc -X slow -x HOL-ODE-Numerics -x Diophantine_Eqns_Lin_Hom -x HLDE
+ isabelle mmt_import -d '$AFP' -A content/MathHub -C AFP=AFP -C _=Distribution -a -X doc -X no_doc -X very_slow
Note that other Isabelle applications may have their own chapter naming
scheme, or re-use official Isabelle chapter names; if nothing is specified,
@@ -149,7 +150,8 @@ shown to end-users by default.
## isabelle mmt_build
This is a thin wrapper for `sbt mmt/deploy` within the formal Isabelle
-environment and the correct directory in the MMT source tree. It also
+environment and the correct directory in the MMT source tree; it trims the
+resulting jar to avoid duplicates of Scala libraries. Furthermore, it
ensures that Isabelle/Scala has been properly bootstrapped beforehand (e.g.
when working from the Isabelle repository).
@@ -173,7 +175,6 @@ directories. Its command-line usage is as follows:
-a select all sessions
-d DIR include session directory
-g NAME select session group NAME
- -l NAME logic session name (default: "Pure")
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-v verbose mode
-x NAME exclude session NAME and all descendants
@@ -205,10 +206,6 @@ descriptions).
Option `-v` enables verbose mode, similar to `isabelle build`.
-Option `-l` allows to use a different base logic images than the `Pure`. It
-rarely makes sense to use something else, because formal content between
-`Pure` and the base logic is *missing* from the import.
-
## isabelle mmt_server
diff --git a/src/mmt-isabelle/lib/Tools/mmt_build b/src/mmt-isabelle/lib/Tools/mmt_build
index 9bbc8b1cd0..327bd779fd 100755
--- a/src/mmt-isabelle/lib/Tools/mmt_build
+++ b/src/mmt-isabelle/lib/Tools/mmt_build
@@ -2,6 +2,22 @@
#
# DESCRIPTION: build and deploy MMT, using sbt
-isabelle_admin_build jars || exit $?
+set -e
-cd "$ISABELLE_MMT_ROOT/src" && sbt mmt/deploy "$@"
+# Isabelle Scala/Java components
+isabelle_admin_build jars
+
+# MMT build
+cd "$ISABELLE_MMT_ROOT/src"
+sbt mmt/deploy "$@"
+
+# strip material already present in Isabelle classpath
+cd "$ISABELLE_MMT_ROOT/deploy"
+rm -rf tmp
+mkdir tmp
+cd tmp
+isabelle_jdk jar xf ../mmt.jar
+rm -r isabelle org/jline org/tukaani scala
+isabelle_jdk jar cf ../mmt.jar .
+cd ..
+rm -rf tmp
diff --git a/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Importer.scala b/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Importer.scala
index 4b0ce6a96b..d505f67fbd 100644
--- a/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Importer.scala
+++ b/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Importer.scala
@@ -183,6 +183,8 @@ object Importer
element: isabelle.Thy_Element.Element_Command = isabelle.Thy_Element.atom(isabelle.Command.empty),
element_timing: isabelle.Document_Status.Overall_Timing = isabelle.Document_Status.Overall_Timing.empty,
command_kind: Option[String] = None,
+ document_tags: List[String] = Nil,
+ document_command: Boolean = false,
meta_data: isabelle.Properties.T = Nil,
heading: Option[Int] = None,
proof: Option[Proof_Text] = None,
@@ -197,8 +199,8 @@ object Importer
element.head.span.content.iterator.takeWhile(tok => !tok.is_begin).map(_.source).mkString
def header_relevant: Boolean =
header.nonEmpty &&
- (classes.nonEmpty || types.nonEmpty || consts.nonEmpty || facts.nonEmpty ||
- locales.nonEmpty || locale_dependencies.nonEmpty)
+ (document_command || classes.nonEmpty || types.nonEmpty || consts.nonEmpty ||
+ facts.nonEmpty || locales.nonEmpty || locale_dependencies.nonEmpty)
def command_name: String = element.head.span.name
@@ -503,14 +505,14 @@ object Importer
// document headings
for (i <- segment.heading) {
val item = make_dummy("heading", i)
- thy_draft.declare_item(item, segment.meta_data)
+ thy_draft.declare_item(item, segment.document_tags, segment.meta_data)
thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.section))
}
// classes
for (decl <- segment.classes) {
decl_error(decl.entity) {
- val item = thy_draft.declare_entity(decl.entity, segment.meta_data)
+ val item = thy_draft.declare_entity(decl.entity, segment.document_tags, segment.meta_data)
thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.universe))
val tp = Isabelle.Class()
add_constant(item, tp, None)
@@ -521,7 +523,7 @@ object Importer
for (decl <- segment.types) {
decl_error(decl.entity) {
val item = thy_draft.make_item(decl.entity, decl.syntax)
- thy_draft.declare_item(item, segment.meta_data)
+ thy_draft.declare_item(item, segment.document_tags, segment.meta_data)
thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.`type`))
if (thy_export.typedefs.exists(typedef => typedef.name == item.entity_name)) {
@@ -538,11 +540,18 @@ object Importer
for (decl <- segment.consts) {
decl_error(decl.entity) {
val item = thy_draft.make_item(decl.entity, decl.syntax, (decl.typargs, decl.typ))
- thy_draft.declare_item(item, segment.meta_data)
+ thy_draft.declare_item(item, segment.document_tags, segment.meta_data)
- thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.`object`))
- if (segment.is_axiomatization)
+ if (segment.is_axiomatization) {
thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.primitive))
+ }
+
+ if (decl.propositional) {
+ thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.predicate))
+ }
+ else {
+ thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.function))
+ }
val tp = Isabelle.Type.all(decl.typargs, thy_draft.content.import_type(decl.typ))
val df = decl.abbrev.map(rhs => Isabelle.Type.abs(decl.typargs, thy_draft.content.import_term(rhs)))
@@ -554,7 +563,7 @@ object Importer
val facts: List[Item] =
segment.facts_single.flatMap(decl =>
decl_error(decl.entity) {
- val item = thy_draft.declare_entity(decl.entity, segment.meta_data)
+ val item = thy_draft.declare_entity(decl.entity, segment.document_tags, segment.meta_data)
thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.statement))
if (segment.is_definition) {
@@ -603,7 +612,7 @@ object Importer
for (locale <- segment.locales) {
decl_error(locale.entity) {
val content = thy_draft.content
- val item = thy_draft.declare_entity(locale.entity, segment.meta_data)
+ val item = thy_draft.declare_entity(locale.entity, segment.document_tags, segment.meta_data)
thy_draft.rdf_triple(Ontology.unary(item.global_name, Ontology.ULO.theory))
val loc_name = item.local_name
val loc_thy = Theory.empty(thy.path.doc, thy.name / loc_name, None)
@@ -654,7 +663,7 @@ object Importer
// locale dependencies (inclusions)
for (dep <- segment.locale_dependencies if dep.is_inclusion) {
decl_error(dep.entity) {
- val item = thy_draft.declare_entity(dep.entity, segment.meta_data)
+ val item = thy_draft.declare_entity(dep.entity, segment.document_tags, segment.meta_data)
val content = thy_draft.content
val from = OMMOD(content.get_locale(dep.source).global_name.toMPath)
@@ -668,7 +677,22 @@ object Importer
}
}
- // RDF document
+ for (segment <- thy_export.segments) {
+ // information about recursion (from Spec_Rules): after all types have been exported
+ for (decl <- segment.consts) {
+ val item = thy_draft.content.get_const(decl.entity.name)
+ decl.primrec_types match {
+ case List(type_name) =>
+ val predicate = if (decl.corecursive) Ontology.ULO.coinductive_for else Ontology.ULO.inductive_on
+ thy_draft.rdf_triple(
+ Ontology.binary(item.global_name, predicate,
+ thy_draft.content.get_type(type_name).global_name))
+ case _ =>
+ }
+ }
+ }
+
+ // RDF document
{
val path = thy_archive.archive_rdf_path.ext("xz")
isabelle.Isabelle_System.mkdirs(path.dir)
@@ -806,6 +830,19 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
dirs = dirs ::: select_dirs, strict = true)
+ /* Isabelle + AFP library info */
+
+ private val isabelle_sessions: Set[String] =
+ isabelle.Sessions.load_structure(options, select_dirs = List(isabelle.Path.explode("$ISABELLE_HOME"))).
+ selection(isabelle.Sessions.Selection.empty).imports_graph.keys.toSet
+
+ private val optional_afp: Option[isabelle.AFP] =
+ {
+ if (isabelle.Isabelle_System.getenv("AFP_BASE").isEmpty) None
+ else Some(isabelle.AFP.init(options))
+ }
+
+
/* resources */
val dump_options: isabelle.Options =
@@ -1066,6 +1103,25 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
isabelle.RDF.meta_data(rendering.meta_data(element_range))
}
+ private def session_meta_data(name: String): isabelle.Properties.T =
+ {
+ if (isabelle_sessions(name)) List(isabelle.RDF.Property.license -> "BSD")
+ else {
+ (for { afp <- optional_afp; entry <- afp.sessions_map.get(name) }
+ yield entry.rdf_meta_data) getOrElse Nil
+ }
+ }
+
+ private def rdf_author_info(entry: isabelle.Properties.Entry): Option[isabelle.Properties.Entry] =
+ {
+ val (a, b) = entry
+ if (a == isabelle.RDF.Property.creator || a == isabelle.RDF.Property.contributor) {
+ Some(a -> isabelle.AFP.trim_mail(b))
+ }
+ else if (a == isabelle.RDF.Property.license) Some(entry)
+ else None
+ }
+
def read_theory_export(rendering: isabelle.Rendering): Theory_Export =
{
val snapshot = rendering.snapshot
@@ -1078,6 +1134,9 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
val syntax = resources.session_base.node_syntax(snapshot.version.nodes, node_name)
+ def document_command(element: isabelle.Thy_Element.Element_Command): Boolean =
+ isabelle.Document_Structure.is_document_command(syntax.keywords, element.head)
+
val node_timing =
isabelle.Document_Status.Overall_Timing.make(
snapshot.state, snapshot.version, snapshot.node.commands)
@@ -1085,7 +1144,10 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
val node_elements =
isabelle.Thy_Element.parse_elements(syntax.keywords, snapshot.node.commands.toList)
- val node_meta_data =
+ val theory_session_meta_data =
+ session_meta_data(theory_qualifier(node_name)).flatMap(rdf_author_info)
+
+ val theory_meta_data =
node_elements.find(element => element.head.span.name == isabelle.Thy_Header.THEORY) match {
case Some(element) => element_meta_data(rendering, element)
case None => Nil
@@ -1095,7 +1157,7 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
{
val relevant_elements =
node_elements.filter(element =>
- isabelle.Document_Structure.is_heading_command(element.head) ||
+ document_command(element) ||
element.head.span.is_kind(syntax.keywords, isabelle.Keyword.theory, false))
val relevant_ids =
@@ -1113,6 +1175,10 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
isabelle.Document_Status.Overall_Timing.make(
snapshot.state, snapshot.version, element.iterator.toList)
+ val element_range = commands_range(rendering.snapshot, element.head, element.last)
+
+ val document_tags = rendering.document_tags(element_range)
+
val meta_data = element_meta_data(rendering, element)
val heading =
@@ -1169,6 +1235,8 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
element = element,
element_timing = element_timing,
command_kind = syntax.keywords.kinds.get(element.head.span.name),
+ document_tags = document_tags,
+ document_command = document_command(element),
meta_data = meta_data,
heading = heading,
proof = proof,
@@ -1185,7 +1253,7 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
Theory_Export(node_name,
node_source = Source(snapshot.node.source),
node_timing = node_timing,
- node_meta_data = node_meta_data,
+ node_meta_data = isabelle.Library.distinct(theory_session_meta_data ::: theory_meta_data),
parents = theory.parents,
segments = segments,
typedefs = theory.typedefs)
@@ -1407,7 +1475,7 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
def rdf_triple(triple: isabelle.RDF.Triple): Unit =
_state.change({ case (content, triples) => (content, triple :: triples) })
- def declare_item(item: Item, props: isabelle.Properties.T)
+ def declare_item(item: Item, tags: List[String], props: isabelle.Properties.T)
{
_state.change(
{ case (content, triples) =>
@@ -1420,8 +1488,15 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
val source_ref =
item.source_ref.map(sref =>
Ontology.binary(item.global_name, Ontology.ULO.sourceref, sref.toURI))
+ val important =
+ tags.reverse.collectFirst({
+ case isabelle.Markup.Document_Tag.IMPORTANT => true
+ case isabelle.Markup.Document_Tag.UNIMPORTANT => false
+ }).toList.map(b =>
+ Ontology.unary(item.global_name, if (b) Ontology.ULO.important else Ontology.ULO.unimportant))
val properties = props.map({ case (a, b) => Ontology.binary(item.global_name, a, b) })
- val triples1 = name :: specs ::: source_ref.toList ::: properties.reverse ::: triples
+
+ val triples1 = name :: specs ::: source_ref.toList ::: important ::: properties.reverse ::: triples
(content1, triples1)
})
}
@@ -1458,10 +1533,13 @@ Usage: isabelle mmt_import [OPTIONS] [SESSIONS ...]
type_scheme = type_scheme)
}
- def declare_entity(entity: isabelle.Export_Theory.Entity, props: isabelle.Properties.T): Item =
+ def declare_entity(
+ entity: isabelle.Export_Theory.Entity,
+ tags: List[String],
+ props: isabelle.Properties.T): Item =
{
val item = make_item(entity)
- declare_item(item, props)
+ declare_item(item, tags, props)
item
}
diff --git a/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Ontology.scala b/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Ontology.scala
index a333ea00bc..4bcc894e80 100644
--- a/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Ontology.scala
+++ b/src/mmt-isabelle/src/info/kwarc/mmt/isabelle/Ontology.scala
@@ -37,11 +37,12 @@ object Ontology
/* unaries */
val `type` = ulo("type") // type constructors
- val `object` = ulo("object") // term constants
+ val function = ulo("function") // term constants
val statement = ulo("statement") // fact items (thm)
val universe = ulo("universe") // type classes (primitive entity)
val name = ulo("name") // external entity name (xname)
+ val predicate = ulo("predicate") // term constants with propositional body type
val theory = ulo("theory") // theory, locale (subsumes type classes as logical specification)
@@ -58,6 +59,9 @@ object Ontology
val derived = ulo("derived") // HOL typedefs, proven statements
val experimental = ulo("experimental") // proof contains "sorry"
+ val important = ulo("important") // command is tagged as "important"
+ val unimportant = ulo("unimportant") // command is tagged as "unimportant"
+
/* binaries */
@@ -65,6 +69,9 @@ object Ontology
val instance_of = ulo("instance-of") // locale interpretation
+ val inductive_on = ulo("inductive-on") // const is specified via primitive recursion on type
+ val coinductive_for = ulo("coinductive-for") // const is specified via primitive co-recursion for type
+
val specifies = ulo("specifies") // theory/locale declares item
val specified_in = ulo("specified-in") // inverse of "specifies"
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveDefinitions.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveDefinitions.scala
index 4849f58444..f6b68c1b8b 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveDefinitions.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveDefinitions.scala
@@ -11,7 +11,7 @@ import frontend.Controller
import info.kwarc.mmt.lf._
import InternalDeclaration._
import InternalDeclarationUtil._
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
import TermConstructingFeatureUtil._
import inductiveUtil._
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveMatch.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveMatch.scala
index 21bffd496e..6fddea24e3 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveMatch.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveMatch.scala
@@ -13,7 +13,7 @@ import InternalDeclaration._
import InternalDeclarationUtil._
import TermConstructingFeatureUtil._
import inductiveUtil._
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
/** theories as a set of types of expressions */
class InductiveMatch extends StructuralFeature("match") with TypedParametricTheoryLike {
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveProofDefinitions.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveProofDefinitions.scala
index 90ed7a3c77..3d68c2dd11 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveProofDefinitions.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveProofDefinitions.scala
@@ -12,7 +12,7 @@ import info.kwarc.mmt.lf._
import InternalDeclaration._
import InternalDeclarationUtil._
import TermConstructingFeatureUtil._
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
import inductiveUtil._
/** theories as a set of types of expressions */
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveTypes.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveTypes.scala
index bab5aeef28..495f372767 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveTypes.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InductiveTypes.scala
@@ -11,6 +11,7 @@ import frontend.Controller
import info.kwarc.mmt.lf._
import InternalDeclaration._
import InternalDeclarationUtil._
+import StructuralFeatureUtils._
import StructuralFeatureUtil._
object inductiveUtil {
@@ -38,7 +39,6 @@ class InductiveTypes extends StructuralFeature("inductive") with ParametricTheor
* @param dd the derived declaration from which the inductive type(s) are to be constructed
*/
override def check(dd: DerivedDeclaration)(implicit env: ExtendedCheckingEnvironment) {}
-
/**
* Elaborates an declaration of one or multiple mutual inductive types into their declaration,
@@ -50,10 +50,20 @@ class InductiveTypes extends StructuralFeature("inductive") with ParametricTheor
def elaborate(parent: ModuleOrLink, dd: DerivedDeclaration) = {
val context = Type.getParameters(dd)
implicit val parentTerm = dd.path
+
+ val decls = parseInternalDeclarations(dd, controller, Some(context))
+ elaborateDeclarations(context, decls)
+ }
+
+ /** Elaborates an derived declaration D using the inductive feature. This is used to reuse the functionality of this feature in different features, speciafically the reflection feature.
+ * @param context The context of D
+ * @param parentTerm The path to D, used as prefix for the external declarations
+ * @param decls The internal declaration of the D
+ */
+ def elaborateDeclarations(context: Context, decls: List[InternalDeclaration])(implicit parentTerm: GlobalName) : Elaboration = {
// to hold the result
var elabDecls : List[Constant] = Nil
- val decls = parseInternalDeclarations(dd, controller, Some(context))
val tpdecls = tpls(decls)
val tmdecls = tmls(decls)
val constrdecls = constrs(tmdecls)
@@ -69,29 +79,22 @@ class InductiveTypes extends StructuralFeature("inductive") with ParametricTheor
* some of the (in)equality axioms would be ill-typed because
* the type system already forces elements of different instances of a dependent type to be unequal and of unequal type
*/
- elabDecls = elabDecls.reverse ::: tmdecls.flatMap(x => noConf(x, tmdecls, types)(dd.path))
+ elabDecls = elabDecls.reverse ::: tmdecls.flatMap(x => noConf(x, tmdecls, types)(parentTerm))
// the no junk axioms
- elabDecls ++= noJunks(decls, context)(dd.path)
+ elabDecls ++= noJunks(decls, context)(parentTerm)
// the testers
- elabDecls ++= testers(tmdecls, tpdecls, decls, context)(dd.path)
+ elabDecls ++= testers(tmdecls, tpdecls, decls, context)(parentTerm)
// the unappliers
- elabDecls ++= unappliers(constrdecls, tpdecls, decls, context)(dd.path)
+ elabDecls ++= unappliers(constrdecls, tpdecls, decls, context)(parentTerm)
// the inductive proof declarations
- elabDecls ++= indProofs(tpdecls, constrdecls, context)(dd.path)
+ elabDecls ++= indProofs(tpdecls, constrdecls, context)(parentTerm)
- //elabDecls foreach {d =>log(defaultPresenter(d)(controller))} //This typically prints all external declarations several times
- new Elaboration {
- def domain = elabDecls map {d => d.name}
- def getO(n: LocalName) = {
- elabDecls.find(_.name == n) foreach(c => log(defaultPresenter(c)(controller)))
- elabDecls.find(_.name == n)
- }
- }
- }
+ externalDeclarationsToElaboration(elabDecls)
+}
/** Check whether the TermLevel has a higher order argument of an inductively defined type
* In that case an error is thrown
@@ -399,4 +402,15 @@ class InductiveTypes extends StructuralFeature("inductive") with ParametricTheor
}
}
+object InductiveTypes {
+ /** Elaborates an derived declaration D using the inductive feature. This is used to reuse the functionality of this feature in different features, speciafically the reflection feature.
+ * @param context The context of D
+ * @param parentTerm The path to D, used as prefix for the external declarations
+ * @param decls The internal declaration of the D
+ */
+ def elaborateDeclarations(context: Context, decls: List[InternalDeclaration])(implicit parentTerm: GlobalName) : Elaboration = {
+ InductiveTypes.elaborateDeclarations(context, decls)
+ }
+}
+
object InductiveRule extends StructuralFeatureRule(classOf[InductiveTypes], "inductive")
\ No newline at end of file
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InternalDeclaration.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InternalDeclaration.scala
index 6166833f9a..972a9febd5 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InternalDeclaration.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/InternalDeclaration.scala
@@ -188,7 +188,7 @@ object InternalDeclaration {
}
}
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
/** helper class for the various declarations in an inductive type */
sealed abstract class InternalDeclaration {
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Quotients.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Quotients.scala
index d93a55b43c..11882c6088 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Quotients.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Quotients.scala
@@ -9,7 +9,7 @@ import modules._
import frontend.Controller
import info.kwarc.mmt.lf._
import InternalDeclaration._
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
import InternalDeclarationUtil._
import info.kwarc.mmt.api.utils.MMT_TODO
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/RecordDefinitions.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/RecordDefinitions.scala
index 3ce9fdbb7a..e24223e826 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/RecordDefinitions.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/RecordDefinitions.scala
@@ -13,11 +13,10 @@ import InternalDeclaration._
import InternalDeclarationUtil._
import RecordUtil._
+import symbols.StructuralFeatureUtil._
+import StructuralFeatureUtils._
import TermConstructingFeatureUtil._
-import StructuralFeatureUtil._
-
-
/** theories as a set of types of expressions */
class RecordDefinitions extends StructuralFeature("record_term") with TypedParametricTheoryLike {
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Records.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Records.scala
index 16f66953ac..61a40d50e9 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Records.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Records.scala
@@ -11,7 +11,7 @@ import frontend.Controller
import info.kwarc.mmt.lf._
import InternalDeclaration._
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
import InternalDeclarationUtil._
object RecordUtil {
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Reflections.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Reflections.scala
new file mode 100644
index 0000000000..4396bc798b
--- /dev/null
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Reflections.scala
@@ -0,0 +1,50 @@
+package info.kwarc.mmt.lf.structuralfeatures
+
+import info.kwarc.mmt.api._
+import objects._
+import symbols._
+import notations._
+import checking._
+import modules._
+import frontend.Controller
+
+import info.kwarc.mmt.lf._
+import InternalDeclaration._
+import InternalDeclarationUtil._
+import StructuralFeatureUtils._
+import StructuralFeatureUtil._
+import inductiveUtil._
+import structuralfeatures.InductiveTypes._
+
+/** theories as a set of types of expressions */
+class Reflections extends StructuralFeature("reflect") with TypedParametricTheoryLike {
+
+ /**
+ * Checks the validity of the inductive type(s) to be constructed
+ * @param dd the derived declaration from which the inductive type(s) are to be constructed
+ */
+ override def check(dd: DerivedDeclaration)(implicit env: ExtendedCheckingEnvironment) {}
+
+ /**
+ * Elaborates an declaration of one or multiple mutual inductive types into their declaration,
+ * as well as the corresponding no confusion and no junk axioms
+ * Constructs a structure whose models are exactly the (not necessarily initial) models of the declared inductive types
+ * @param parent The parent module of the declared inductive types
+ * @param dd the derived declaration to be elaborated
+ */
+ def elaborate(parent: ModuleOrLink, dd: DerivedDeclaration) = {
+ val (indDefPath, context, indParams) = ParamType.getParams(dd)
+ val (indD, indCtx) = controller.library.get(indDefPath) match {
+ case indD: DerivedDeclaration if (indD.feature == "inductive") => (indD, Type.getParameters(indD))
+ case d: DerivedDeclaration => throw LocalError("the referenced derived declaration is not of the feature inductive but of the feature "+d.feature+".")
+ case _ => throw LocalError("Expected definition of corresponding inductively-defined types at "+indDefPath.toString()
+ +" but no derived declaration found at that location.")
+ }
+ val decls = parseInternalDeclarations(indD, controller, Some(context))(dd.path)
+
+
+ structuralfeatures.InductiveTypes.elaborateDeclarations(context, decls)(dd.path)
+ }
+}
+
+object ReflectionRule extends StructuralFeatureRule(classOf[Reflections], "reflect")
\ No newline at end of file
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/StructuralFeaturesUtil.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/StructuralFeaturesUtil.scala
index 40737afcfa..9d8663c388 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/StructuralFeaturesUtil.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/StructuralFeaturesUtil.scala
@@ -12,7 +12,7 @@ import info.kwarc.mmt.lf._
import InternalDeclarationUtil._
import InternalDeclaration._
-object StructuralFeatureUtil {
+object StructuralFeatureUtils {
val theory: MPath = LF._base ? "DHOL"
object Ded {
val path = LF._base ? "Ded" ? "DED"
@@ -84,7 +84,7 @@ object StructuralFeatureUtil {
}
}
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
object TermConstructingFeatureUtil {
def correspondingDecl(dd: DerivedDeclaration, d: LocalName): Option[Constant] = {
dd.getO(d) map {
diff --git a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Subtypes.scala b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Subtypes.scala
index 101fdb0b11..b28cf203a3 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Subtypes.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/lf/structuralfeatures/Subtypes.scala
@@ -9,7 +9,7 @@ import modules._
import frontend.Controller
import info.kwarc.mmt.lf._
import InternalDeclaration._
-import StructuralFeatureUtil._
+import StructuralFeatureUtils._
import InternalDeclarationUtil._
import info.kwarc.mmt.api.utils.MMT_TODO
diff --git a/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/Combinators.scala b/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/Combinators.scala
index 60c0b194c9..6c6c6054a0 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/Combinators.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/Combinators.scala
@@ -314,11 +314,11 @@ object ComputeRename extends ComputationRule(Rename.path) {
trait Pushout extends ConstantScala {
val parent = Combinators._path
- def apply(d1: Term, r1: List[Term], d2: Term, r2: List[Term]) = {
- path(d1 :: r1 ::: List(d2) ::: r2)
+ def apply(d1: Term, r1: List[Term], d2: Term, r2: List[Term], over : Term) = {
+ path(d1 :: r1 ::: List(d2) ::: r2 ::: List(over))
}
- def unapply(t: Term): Option[(Term,List[Term],Term,List[Term])] = t match {
+ def unapply(t: Term): Option[(Term,List[Term],Term,List[Term],Term)] = t match {
case OMA(OMS(this.path), args) =>
var left = args
val d1 = left.headOption.getOrElse(return None)
@@ -329,14 +329,16 @@ trait Pushout extends ConstantScala {
left = left.tail
val r2 = left.takeWhile(t => Rename1.unapply(t).isDefined)
left = left.drop(r2.length)
+ val over = left.headOption.getOrElse(return None)
+ left = left.tail
if (left.nonEmpty) return None
- Some((d1,r1,d2,r2))
+ Some((d1,r1,d2,r2,over))
case _ => None
}
}
object PushoutUtils {
- case class BranchInfo(anondiag: AnonymousDiagram, dom: LocalName, distNode: DiagramNode,
+ case class BranchInfo(anondiag: AnonymousDiagram, distNode: DiagramNode,
distTo: List[DiagramArrow], renames: List[(LocalName,Term)]) {
def extend(label: LocalName, po: DiagramNode) = {
//val morph = new AnonymousMorphism(po.theory.decls.diff(distNode.theory.decls))
@@ -347,16 +349,14 @@ object PushoutUtils {
def collectBranchInfo(solver: CheckingCallback,d: Term,rename : List[Term])(implicit stack: Stack, history: History): Option[BranchInfo] = {
val ren = Common.asSubstitution(rename)
val ad = Common.asAnonymousDiagram(solver, d).getOrElse(return None)
- val dom = ad.getDistArrow.getOrElse(return None).from
val distNode = ad.getDistNode.getOrElse(return None)
val distTo = ad.getDistArrowsTo(distNode.label)
- Some(PushoutUtils.BranchInfo(ad,dom,distNode,distTo,ren))
+ Some(PushoutUtils.BranchInfo(ad,distNode,distTo,ren))
}
}
object Combine extends Pushout {
val name = "combine"
-
val nodeLabel = LocalName("pres")
val arrowLabel1 = LocalName("extend1")
val arrowLabel2 = LocalName("extend2")
@@ -366,59 +366,59 @@ object Combine extends Pushout {
object ComputeCombine extends ComputationRule(Combine.path) {
def apply(solver: CheckingCallback)(tm: Term, covered: Boolean)(implicit stack: Stack, history: History): Simplifiability = {
- val Combine(d1,r1,d2,r2) = tm
-
- /* Calculate branch info */
- val b1 : BranchInfo = PushoutUtils.collectBranchInfo(solver,d1,r1).getOrElse(return Recurse)
- val b2 : BranchInfo = PushoutUtils.collectBranchInfo(solver,d2,r2).getOrElse(return Recurse)
-
- /* Finding the common source (\Gamma) */
- val extArrows1 = b1.distTo.filter(_.label.toString.contains("extend"))
- val extArrows2 = b2.distTo.filter(_.label.toString.contains("extend"))
- val List(sN1,sN2) = List(b1,b2).map(b => b.distTo.filter(_.label.toString.contains("extend")).map(a => a.from))
- val sourceName : LocalName = (sN1 intersect sN2).last
- val source : DiagramNode = sourceName match {
- case Common.ExistingName(s) => b1.anondiag.getNode(sourceName).get
- case _ => return Recurse
- }
+ /**************** Input pieces *****************/
+ val Combine(d1, r1, d2, r2, over) = tm
+ val List(ad1, ad2, ad_over) = List(d1, d2, over).map(d => Common.asAnonymousDiagram(solver, d).get) // TODO: Handle errors here
+
+ /**************** Calculate the views from the source to the two nodes (after renaming) ****************/
+ val List(renames1,renames2) : List[List[OML]] = List(r1,r2).map{r => Common.asSubstitution(r).map{case (o,n) => OML(o,None,Some(n))}}
+ val List(in_view1,in_view2) : List[List[OML]] = List(ad1,ad2).map{d => d.viewOf(ad_over.getDistNode.get, d.getDistNode.get)} // TODO: Handle errors here
+ val view1: List[OML] = ad1.compose(in_view1,renames1)
+ val view2: List[OML] = ad2.compose(in_view2,renames2)
+
+ /**************** Check The Guard *******************/
+ /* - Now, view1 and view2 have the list of assignments from the source to the targets (after applying the renames)
+ * - We compare them to make sure the names matches
+ * - Order does not matter, so we convert the list to a set.
+ * */
+ if (view1.toSet != view2.toSet) throw (new GeneralError("Wrong renames"))
+
+ /**************** Define the new theory ***************/
+ // apply the rename
+ def view_as_sub(v : List[OML]) = v.map{case OML(o,_,Some(n),_,_) => (o,n)}
+ val List(renThry1,renThry2) : List[AnonymousTheory] = List(ad1,ad2).map(b => b.getDistNode.get.theory.rename(view_as_sub(view1):_*))
+
+ /* Calculate the pushout as distinct union
+ * TODO: Find a better way to choose the meta-theory
+ * */
+ val new_decls : List[OML] = (renThry1.decls union renThry2.decls).distinct
+ val new_theory : AnonymousTheory = new AnonymousTheory(ad1.getDistNode.get.theory.mt,new_decls)
+
+ /****************** Build the new diagram *******************/
+ val dist_node: DiagramNode = DiagramNode(Combine.nodeLabel, new_theory)
+ // TODO: The assignments in the arrow need to be the identity
+ val impl_arrow = DiagramArrow(Combine.arrowLabel, ad_over.distNode.get, dist_node.label, new AnonymousMorphism(Nil), true)
- /* Applying renames on the theories */
- val List(renamedThry1,renamedThry2) = List(b1,b2).map(b => b.distNode.theory.rename(b.renames:_*))
+ /* This is used in case theories are not built in a tiny-theories way. In this case, we need to generate names for the arrows. */
+ val jointDiag: AnonymousDiagram = (Common.prefixLabels(ad1, LocalName("left")) union Common.prefixLabels(ad2, LocalName("right")))
- /* Checking for the guard:
- * If two decls are renamed to the same name, they should be the same in the source node */
- // Going through the rename arrows:
- val List(view_ad1,view_ad2) = List(b1,b2).map(b => b.anondiag.viewOf(source,b.distNode))
- if(view_ad1 != view_ad2) new GeneralError("Wrong renames")
+ val List(map1,map2) = List(view1,view2).map{v => Common.asSubstitution(v).map { case (o, n) => OML(o, None, Some(n))}}
- /* Theory with the new declarations */
- val new_thy = renamedThry1 union renamedThry2
+ val arrow1 = DiagramArrow(Combine.arrowLabel1, ad1.distNode.get, dist_node.label, new AnonymousMorphism(map1), false)
+ val arrow2 = DiagramArrow(Combine.arrowLabel2, ad2.distNode.get, dist_node.label, new AnonymousMorphism(map2), false)
- /* TODO: How to choose the meta-theory? We need to have a constraint that one of them contains the other? */
- val result_node = DiagramNode(Combine.nodeLabel, new_thy)
- val diag = DiagramArrow(Combine.arrowLabel, source.label, result_node.label, new AnonymousMorphism(Nil), true)
+ val result_diag = new AnonymousDiagram(jointDiag.nodes ::: List(dist_node), jointDiag.arrows ::: List(arrow1, arrow2, impl_arrow), Some(dist_node.label))
- /* This is used in case theories are not built in a tiny-theories way. In this case, we need to generate names for the arrows. */
- val jointDiag = Common.prefixLabels(b1.anondiag, LocalName("left")) union Common.prefixLabels(b2.anondiag, LocalName("right"))
- /* val List(dom1,dom2) = List(ad1,ad2) map {d =>
- val fromT = d.getDistArrow.getOrElse(return Recurse).from
- fromT match {
- case Common.ExistingName(p) => p
- case _ => return Recurse
- }
- }*/
- val arrow1 = b1.extend(Combine.arrowLabel1, result_node)
- val arrow2 = b2.extend(Combine.arrowLabel2, result_node)
- val ad = new AnonymousDiagram(jointDiag.nodes ::: List(result_node), jointDiag.arrows:::List(arrow1,arrow2, diag), Some(result_node.label))
- Simplify(ad.toTerm)
+ Simplify(result_diag.toTerm)
}
+
}
object Mixin extends Pushout {
val name = "mixin"
val nodeLabel = LocalName("pres")
- val arrowLabel1 = LocalName("extend")
+ val arrowLabel1 = LocalName("extend1")
val arrowLabel2 = LocalName("view")
val arrowLabel = LocalName("extend")
}
@@ -431,7 +431,7 @@ object Mixin extends Pushout {
* inclusion from B to Translate(m,T)
*/
-
+/*
object ComputeMixin extends ComputationRule(Mixin.path) {
def apply(solver: CheckingCallback)(tm: Term, covered: Boolean)(implicit stack: Stack, history: History): Simplifiability = {
/* Both combine and translate takes two diagrams and two renames. The difference is in the content of the second diagram */
@@ -451,8 +451,6 @@ object ComputeMixin extends ComputationRule(Mixin.path) {
val view_renamed : AnonymousMorphism = new AnonymousMorphism(Common.applySubstitution(view.morphism.decls,ren2))
val view_from : DiagramNode = ad2.getNode(view.from).getOrElse(return Recurse)
- val iter = view.morphism.decls.map(oml => oml.df)
-
/** TODO: THis part needs to be CHANGED */
val mor = view.morphism.decls.map {oml => (LocalName(oml.name),oml.df.getOrElse(return Recurse))}
val morAsSub = view.morphism.decls.flatMap{oml => oml.df.toList.map {d => Sub(oml.name, d)}}
@@ -461,32 +459,16 @@ object ComputeMixin extends ComputationRule(Mixin.path) {
val new_decls : List[OML] = Common.applySubstitution(d1_dN_renamed.decls,mor)
val pushout = new AnonymousTheory(d1_dN.theory.mt,new_decls)
- /*
- var pushout : AnonymousTheory = new AnonymousTheory(d1_dN_renamed.mt,Nil)
- d1_dN_renamed.decls.foreach {oml =>
- // TODO: I Am here, check is this is correct
- if (view_renamed.decls.contains()) {
- val omlT = (view_renamed.decls.dropWhile(_.name == oml.name).head.df).getOrElse(return Recurse).asInstanceOf[OML]
- pushout = pushout.add(omlT)
- // if (d1_dN_renamed.isDeclared(oml.name)) {
- // solver.error("pushout not defined because of name clash: " + oml.name)
- // return Recurse
- }else {
- val omlT = translator(oml, stack.context).asInstanceOf[OML]
- pushout = pushout.add(omlT)
- }
- }*/
-
val node = DiagramNode(Mixin.nodeLabel,pushout)
// TODO: The morphisms needs more thinking
val arrow1 = DiagramArrow(Mixin.arrowLabel1,d1_dN.label,node.label,view.morphism,false)
val arrow2 = DiagramArrow(Mixin.arrowLabel2,d2_dN.label,node.label,new AnonymousMorphism(Nil),false)
- val dA = DiagramArrow(Mixin.arrowLabel,ad2.getDistArrow.getOrElse(return Recurse).from,node.label,view.morphism,true)
+ val dA = DiagramArrow(Mixin.arrowLabel,view.from,node.label,view.morphism,true)
val result = new AnonymousDiagram(ad1.nodes:::ad2.nodes:::List(node),ad1.arrows:::ad2.arrows:::List(arrow1,arrow2,dA),Some(node.label))
Simplify(result.toTerm)
}
}
-
+*/
// TODO better name
/** see [[Mixin]] */
object Expand extends BinaryConstantScala(Combinators._path, "expand")
diff --git a/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/DiagramDefinitions.scala b/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/DiagramDefinitions.scala
index 50e5019ddc..3206fa0c13 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/DiagramDefinitions.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/moduleexpressions/DiagramDefinitions.scala
@@ -19,7 +19,7 @@ class DiagramDefinition extends ModuleLevelFeature(DiagramDefinition.feature) {
def check(dm: DerivedModule)(implicit env: ExtendedCheckingEnvironment) {}
override def modules(dm: DerivedModule) = {
- val diag = dm.dfC.normalized.getOrElse {throw LocalError("no definiens found")}
+ val diag : Term = dm.dfC.normalized.getOrElse {throw LocalError("no definiens found")}
val ad = diag match {
case AnonymousDiagramCombinator(ad) => ad
case df => throw LocalError("definiens not a diagram: " + controller.presenter.asString(df)) // TODO should use proper error handler
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/Names.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/Names.scala
index 158167802b..6dc39b83a3 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/Names.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/Names.scala
@@ -43,10 +43,14 @@ object Codecs extends TheoryScala {
object SchemaLang extends TheoryScala {
val _base = MathData._base
val _name = LocalName("MDDL")
- val foreignKey = new Tagger(_path ? "foreignKey")
- val opaque = new Tagger(_path ? "opaque")
- val hidden = new Tagger(_path ? "hidden")
- val collection = new Tagger(_path ? "collection")
- val schemaGroup = _path ? "schemaGroup"
val datasetName = _path ? "datasetName"
+ val schemaGroup = _path ? "schemaGroup"
+ object foreignKey extends BinaryLFConstantScala(_path, "foreignKey")
+ val opaque = new Tagger(_path ? "opaque")
+ val hidden = new Tagger(_path ? "hidden")
+ val collection = new Tagger(_path ? "collection")
+
+ val datasetNameAnnotator = new StringAnnotator(datasetName)
+ val schemaGroupAnnotator = new StringAnnotator(schemaGroup)
+ val foreignKeyAnnotator = new TermAnnotator(foreignKey.path)
}
\ No newline at end of file
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/SQLBridge.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/SQLBridge.scala
index d110b9c1e2..a297b9ad33 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/SQLBridge.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/SQLBridge.scala
@@ -22,14 +22,18 @@ class SQLBridge(controller: Controller, rules: RuleSet, commProps: List[Commutin
val logPrefix = "sql"
def theoryToTable(t: Theory): Table = {
+ val datasetName: Option[String] = SchemaLang.datasetNameAnnotator.get(t)
+ val schemaGroup: Option[String] = SchemaLang.schemaGroupAnnotator.get(t)
+ val includes = t.getAllIncludesWithoutMeta.map(_._1)
val cols = t.getConstants flatMap {
case c: Constant => constantToColumn(c).toList
}
- Table(t.path, cols)
+ Table(t.path, datasetName, schemaGroup, cols, includes)
}
-
+
def constantToColumn(c: Constant): Option[Column] = {
val codecTerm = Codecs.codecAnnotator.get(c).getOrElse(return None) // no codec given
+ val foreignKeyTerm: Option[MPath] = SchemaLang.foreignKeyAnnotator.get(c).map(_.toMPath)
val context = Context(c.home.toMPath)
val mathType = c.tp.getOrElse {
log("no mathematical type given " + c.name)
@@ -56,25 +60,24 @@ class SQLBridge(controller: Controller, rules: RuleSet, commProps: List[Commutin
return None
}
- val isNullable = false //TODO
- val isForeignKey = SchemaLang.foreignKey.get(c)
val isOpaque = SchemaLang.opaque.get(c)
val isHidden = SchemaLang.hidden.get(c)
val collection = if (SchemaLang.collection.get(c)) {
Some(CollectionInfo(c.path,c.metadata))
- } else
+ } else
None
- val col = Column(c.path, mathType, codecTermChecked, dbtype, isForeignKey, isOpaque, !isHidden, collection)
+ // TODO: foreign key handling — currently we only record the annotated target MPath in the column
+ val col = Column(c.path, mathType, codecTermChecked, dbtype, foreignKeyTerm, isOpaque, !isHidden, collection)
Some(col)
}
-
+
private def defaultCodec(rt: RealizedType): Term = rt.synType match {
case OMS(MathData.bool) => OMS(Codecs.BoolIdent)
case OMS(MathData.int) => OMS(Codecs.IntIdent)
case OMS(MathData.string) => OMS(Codecs.StringIdent)
case OMS(MathData.uuid) => OMS(Codecs.UUIDIdent)
}
-
+
/**
* converts a term over a schema theory into the corresponding SQL expression and the codec that allows decoding it
*/
@@ -105,7 +108,7 @@ class SQLBridge(controller: Controller, rules: RuleSet, commProps: List[Commutin
val (params,argsToEncode) = args.splitAt(cp.mathParams.length)
/* TODO recursive call should alternate with matching
if the free variables in an inCodec have already been solved, they are passed as the expected codec
- if an expected codec is passed, it is matched against the outCodec */
+ if an expected codec is passed, it is matched against the outCodec */
val (argsE, argsC) = args.map {a => termToExpr(table, a, None)}.unzip
val res = matcher(context, cp.context) {eq =>
((params zip cp.mathParams) forall {case (a,b) => eq(a,b)}) &&
@@ -122,9 +125,9 @@ class SQLBridge(controller: Controller, rules: RuleSet, commProps: List[Commutin
throw CannotTranslate(tm)
}
}
-
+
def termToFilter(table: Table, tm: Term): Filter = {
- // determine type and codec for each subterm, then translate according to commutativity annotations
+ // determine type and codec for each subterm, then translate according to commutativity annotations
val (e,_) = termToExpr(table, tm, None)
Filter(e)
}
@@ -143,8 +146,8 @@ object SQLBridge {
case b: BaseType[_] => OMS(utils.invlistmap(basetypes, b).get)
case ArrayType(t) => array(typeToTerm(t))
}
-
- /** MPath of example schema */
+
+ /** MPath of example schema */
val example = SchemaLang._base ? "Example"
/** convert a theory to a table */
def test(thyP: MPath) = {
@@ -160,6 +163,18 @@ object SQLBridge {
case e:Error => println(e.toStringLong)
}
}
+ def test2(thyP: MPath, controller: Controller) = {
+ try {
+ val rules = RuleSet.collectRules(controller, Context(thyP))
+ val bridge = new SQLBridge(controller, rules, Nil)
+ controller.handleLine("log+ " + bridge.logPrefix)
+ val thy = controller.globalLookup.getAs(classOf[Theory], thyP)
+ val table = bridge.theoryToTable(thy)
+ table
+ } catch {
+ case e:Error => println(e.toStringLong)
+ }
+ }
}
case class CannotTranslate(tm: Term) extends Throwable
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/Syntax.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/Syntax.scala
index 39a390082c..267df820f2 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/Syntax.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/Syntax.scala
@@ -7,10 +7,13 @@ import scala.language.existentials
/**
* @param path the MMT name of the table
+ * @param datasetName table description metadatum
+ * @param schemaGroup optional schema-group metadatum, used to select which theories belong to this schema
* @param columns sequence of all columns
* @param collections sequence of all collections
+ * @param includes list of all includes (in particular, referenced tables)
*/
-case class Table(path: MPath, columns: Seq[Column]) {
+case class Table(path: MPath, datasetName: Option[String], schemaGroup: Option[String], columns: Seq[Column], includes: List[MPath]) {
/** db name of the table, underscore style */
def name = path.name.toString
/** retrieve all columns that are collections */
@@ -22,7 +25,7 @@ case class Table(path: MPath, columns: Seq[Column]) {
* @param mathType the mathematical type of the column
* @param codec the codec expression for en/de-coding functions between them
* @param dbtype database type
- * @param isForeignKey
+ * @param foreignKey the MPath of the referenced table if this column is a foreign key, None otherwise
* @param opaque no meaningful operations on column except for (in)equality (annotated in schema)
* @param isDisplayedByDefault (annotated in schema) whether the column gets displayed in the default view of the result set
* later we could add: displayName, description
@@ -31,7 +34,7 @@ case class Table(path: MPath, columns: Seq[Column]) {
* The column is anullable if it is not the primaryKey
*/
case class Column(path: GlobalName, mathType: Term, codec: Term, dbtype: SQLSyntax.Type[_],
- isForeignKey: Boolean, opaque: Boolean, isDisplayedByDefault: Boolean, collection: Option[CollectionInfo]) {
+ foreignKey: Option[MPath], opaque: Boolean, isDisplayedByDefault: Boolean, collection: Option[CollectionInfo]) {
/** the db name of the column */
def name = path.name.toString
}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeFile.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeFile.scala
new file mode 100644
index 0000000000..1cc8714ec8
--- /dev/null
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeFile.scala
@@ -0,0 +1,23 @@
+package info.kwarc.mmt.sql.codegen
+
+import java.io.PrintWriter
+
+case class CodeFile(replacements: Map[String, String], templatePath: String, destinationPath: Option[String] = None) {
+
+ def writeToFile(write: Boolean): Unit = {
+ // TODO should it be line by line?
+ if (write) {
+ val handle = scala.io.Source.fromFile(templatePath)
+ val result = try {
+ replacements.foldLeft(handle.mkString)((str, mapItem) => {
+ str.replaceAllLiterally(mapItem._1, mapItem._2)
+ })
+ } finally handle.close()
+
+ val actualDestination = destinationPath.getOrElse(templatePath)
+ val pw = new PrintWriter(new java.io.File(actualDestination))
+ try pw.write(result) finally pw.close()
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeGenerator.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeGenerator.scala
index bc0763bdd9..c075501e2a 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeGenerator.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeGenerator.scala
@@ -1,88 +1,48 @@
package info.kwarc.mmt.sql.codegen
-import java.io.PrintWriter
-
-import info.kwarc.mmt.api.GlobalName
+import info.kwarc.mmt.api.MPath
import info.kwarc.mmt.api.frontend.Controller
import info.kwarc.mmt.api.modules.Theory
-import info.kwarc.mmt.api.objects.{OMA, OMS}
import info.kwarc.mmt.sql.{Column, SQLBridge, SchemaLang, Table}
+import scala.collection.mutable
+
object CodeGenerator {
- def writeToFile(p: String, s: String): Unit = {
- val pw = new PrintWriter(new java.io.File(p))
- try pw.write(s) finally pw.close()
+ private def isInputTheory(t: Theory, schemaGroup: Option[String]): Boolean = {
+ val schemaLangIsMetaTheory = t.meta.contains(SchemaLang._path)
+ val isInSchemaGroup = schemaGroup.forall(sg => {
+ t.metadata.get(SchemaLang.schemaGroup).headOption.exists(_.value.toString == sg)
+ })
+ schemaLangIsMetaTheory && isInSchemaGroup
}
def main(args: Array[String]): Unit = {
- args.foreach(println)
+ val outputDir = args(0)
+ val archiveId = args(1)
+ val schemaGroup = if (args.length > 2) Some(args(2)) else None
+
val dirPaths = ProjectPaths(
- args.headOption.getOrElse("~/DiscreteZooOutput"),
+ outputDir,
"backend/src/main/scala/xyz/discretezoo/web",
"frontend/src",
"db"
)
val jdbcInfo = JDBCInfo("jdbc:postgresql://localhost:5432/discretezoo2", "discretezoo", "D!screteZ00")
+ val prefix = "MBGEN"
val controller = Controller.make(true, true, List())
- val graphPath = SchemaLang._base ? "Graph"
- val mxPath = SchemaLang._base ? "Maniplex"
- val exJoe = SchemaLang._base ? "MatrixS"
- val exJane = SchemaLang._base ? "MatrixWithCharacteristicS"
- val mPath = SchemaLang._base ? "Matrices"
- val sg = "Mathilde"
-
// remove later
- controller.handleLine("build ODK/DiscreteZOO mmt-omdoc")
-
- def isInputTheory(t: Theory) = {
- val schemaLangIsMetaTheory = t.meta.contains(SchemaLang._path)
- val isInSchemaGroup = t.metadata.get(SchemaLang.schemaGroup).headOption.exists(_.value.toString == sg)
- schemaLangIsMetaTheory && isInSchemaGroup
- }
+ controller.handleLine(s"build $archiveId mmt-omdoc")
- val theories = controller.backend.getArchive("ODK/DiscreteZOO").get.allContent.map(controller.getO).collect({
- case Some(t : Theory) if isInputTheory(t) => t
+ // add input theories to search
+ val paths = controller.backend.getArchive(archiveId).get.allContent.map(controller.getO).collect({
+ case Some(theory : Theory) if isInputTheory(theory, schemaGroup) => theory.path
})
-// collect {
-// case Some(t : Theory) if t.meta.contains(SchemaLang._base.toMPath) => t.path
-// }
- theories.map(t => t).foreach(println)
- println(" - - - - - ")
+ val tables = new Tables(prefix, dirPaths, p => SQLBridge.test2(p, controller), jdbcInfo, paths)
+ tables.databaseCode.writeAll(true)
- val maybeTable: Option[Table] = SQLBridge.test(mPath) match {
- case t: Table => Some(t)
- case _ => None
- }
-
- maybeTable.foreach(t => {
-
- val generate = true
- val prefix = "TEST"
- val tableCode = TableCode(prefix, t)
- val dbCode = DatabaseCode(dirPaths, prefix, Seq(tableCode), jdbcInfo) // TODO: one table only
- val name = {t.name}
-
-// t.columns.map(_.collection).collect({
-// case Some(info) => info
-// }).map(_.metadata).foreach(println)
-
- if (generate) {
- // backend
- dbCode.createTablePackageDirectories()
- writeToFile(s"${dirPaths.backendPackagePath}/JsonSupport.scala", dbCode.jsonSupportCode)
- writeToFile(s"${dirPaths.backendPackagePath}/Create.scala", dbCode.mainCreateCode)
- writeToFile(s"${dirPaths.dbPackagePath}/ZooDb.scala", dbCode.fileDbCode)
- writeToFile(s"${dbCode.tablePackagePath(name)}/$name.scala", tableCode.codeCaseClass)
- writeToFile(s"${dbCode.tablePackagePath(name)}/${name}PlainQuery.scala", tableCode.codePlainQueryObject)
- writeToFile(s"${dbCode.tablePackagePath(name)}/${name}Table.scala", tableCode.codeTableClass)
- // frontend
- writeToFile(s"${dirPaths.frontendPath}/objectProperties.json", dbCode.objectPropertiesJSON)
- }
-
- })
}
}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeHelper.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeHelper.scala
new file mode 100644
index 0000000000..c6f108b54f
--- /dev/null
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/CodeHelper.scala
@@ -0,0 +1,13 @@
+package info.kwarc.mmt.sql.codegen
+
+import info.kwarc.mmt.sql.Column
+
+trait CodeHelper {
+
+ val quotes = "\"\"\""
+
+ def camelCase(s: String): String = "_([a-z\\d])".r.replaceAllIn(s, _.group(1).toUpperCase())
+ def quoted(s: String): String = s""""$s""""
+ def columnNameDB(c: Column): String = quoted(c.name.toUpperCase)
+
+}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/ColumnCode.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/ColumnCode.scala
index 1577f696c3..01a60e3e9b 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/ColumnCode.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/ColumnCode.scala
@@ -3,26 +3,50 @@ package info.kwarc.mmt.sql.codegen
import info.kwarc.mmt.api.objects.{OMA, OMS}
import info.kwarc.mmt.sql.Column
-case class ColumnCode(column: Column) {
+case class ColumnCode(column: Column, join: Option[JoinCode] = None) extends CodeHelper {
- def nameQuotes = s""""${column.name}""""
- def nameDb: String = column.name.toUpperCase
- def nameCamelCase: String = "_([a-z\\d])".r.replaceAllIn(column.name, _.group(1).toUpperCase())
+ private def nameQuotes: String = quoted(column.name)
- def typeString: String = {
+ private def typeString: String = {
if (column.dbtype.toString == "List[List[Int]]" || column.dbtype.toString == "List[Int]") s"List[Int]"
else s"Option[${column.dbtype.toString}]"
}
- def caseClassField: String = s"$nameCamelCase: $typeString"
+ private def codecName: String = column.codec match {
+ case OMS(x) => x.name.toString
+ case OMA(_, codecArgs) => codecArgs.head.toMPath.name.last.toString
+ }
+
+ def name: String = column.name
+ def nameDbQuoted: String = columnNameDB(column)
+ def isDisplayedByDefault: Boolean = column.isDisplayedByDefault
+
+ // JsonSupport
def jsonWriterMapItem: String = s"""Some($nameQuotes -> o.$nameCamelCase.toJson)"""
- def accessorMethod: String = {
- s"""def $nameCamelCase: Rep[$typeString] = column[$typeString]("$nameDb")"""
+ // CaseClass
+ def caseClassField: String = s" $nameCamelCase: $typeString"
+ def selectMapCaseClass: String = s""" "$name" -> $nameCamelCase"""
+
+ // PlainQueryObject
+ def getResultItem: String = {
+ if (join.nonEmpty) "None"
+ else typeString match {
+ case "UUID" => "r.nextObject.asInstanceOf[UUID]"
+ case "List[Int]" => "r.<<[Seq[Int]].toList"
+ case _ => "r.<<"
+ }
}
- def selectMapItem: String = s""""${column.name}" -> this.$nameCamelCase"""
+ // TableClass
+ def nameCamelCase: String = camelCase(name)
+ def accessorMethod: String = {
+ val fk = join.map(_.fkMethod).map(m => s"\n$m").getOrElse("")
+ s""" def $nameCamelCase: Rep[$typeString] = column[$typeString]($nameDbQuoted)$fk"""
+ }
+ def selectMapTableClass: String = s""" "$name" -> this.$nameCamelCase"""
+ // Frontend
def jsonObjectProperties: String = {
val colType = column.dbtype.toString match {
case "Int" => "numeric"
@@ -32,9 +56,4 @@ case class ColumnCode(column: Column) {
s"""$nameQuotes: {"isFilter": ${!column.opaque}, "display": $nameQuotes, "type": "$codecName"}"""
}
- private def codecName: String = column.codec match {
- case OMS(x) => x.name.toString
- case OMA(_, codecArgs) => codecArgs.head.toMPath.name.last.toString
- }
-
}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/DatabaseCode.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/DatabaseCode.scala
index 30a017e180..12d8143a78 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/DatabaseCode.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/DatabaseCode.scala
@@ -1,69 +1,117 @@
package info.kwarc.mmt.sql.codegen
-case class DatabaseCode(paths: ProjectPaths, name: String, tables: Seq[TableCode], dbInfo: JDBCInfo) {
+import java.io.PrintWriter
- def tablePackagePrefix: String = name
- def tablePackagePath(name: String) = s"${paths.dbPackagePath}/$tablePackagePrefix$name"
+case class DatabaseCode(prefix: String, paths: ProjectPaths, tables: Seq[Seq[TableCode]], dbInfo: JDBCInfo, viewCreation: Seq[(String, String)]) {
- def createTablePackageDirectories(): Unit = {
- tables.foreach(t => {
- val dir = new java.io.File(tablePackagePath(t.tableName))
- if (!dir.exists()) dir.mkdir()
- })
+ private def writeToFile(f: String, s: String, write: Boolean): Unit = {
+ if (write) {
+ val pw = new PrintWriter(new java.io.File(f))
+ try pw.write(s) finally pw.close()
+ }
+ else println(s)
}
- private val tableObjects = tables
- .map(t => s"object ${t.tableObject} extends TableQuery(new ${t.tableClass}(_))").mkString(",\n")
-
- def mainCreateCode: String = {
- val importTablePackages = tables
- .map(t => s"import ${t.packageString}.${t.tableClass}").mkString(",\n")
- val schemaCreateList = tables.map(t => s"${t.tableObject}.schema.create").mkString(", ")
- scala.io.Source.fromFile(s"${paths.backendPackagePath}/Create.scala").mkString
- .replaceFirst("//importTablePackages", importTablePackages)
- .replaceFirst("//tableObjects", tableObjects)
- .replaceFirst("//schemaCreateList", schemaCreateList)
+ private def tableFiles(t: TableCode): Seq[(Map[String, String], String, String)] = {
+ Seq(
+ (t.caseClassRepl, "CaseClass", ""),
+ (t.plainQueryRepl, "PlainQueryObject", "PlainQuery"),
+ (t.tableClassRepl, "TableClass", "Table")
+ )
}
- def jsonSupportCode: String = {
- val importTablePackages = tables
- .map(t => s"import ${t.packageString}.${t.caseClass}").mkString(",\n")
- val casesToJson = tables
- .map(_.jsonSupportMap).mkString("\n")
- scala.io.Source.fromFile(s"${paths.backendPackagePath}/JsonSupport.scala").mkString
- .replaceFirst("//importTablePackages", importTablePackages)
- .replaceFirst("//casesToJson", casesToJson)
+ def writeAll(generate: Boolean = true): Unit = {
+ // backend
+ Seq((zooCreateRepl, "Create"), (jsonSupportRepl, "JsonSupport"), (zooDbRepl, "db/ZooDb"))
+ .foreach(t => CodeFile(t._1, s"${paths.backendPackagePath}/${t._2}.scala").writeToFile(generate))
+ val tempDir = s"${paths.dbPackagePath}/temp"
+ def tableTempPath(s: String) = s"$tempDir/$s.scala"
+ tables.foreach(s => {
+ val t = s.head
+ val tPath = tablePackagePath(t.info.name)
+ val dir = new java.io.File(tPath)
+ if (generate && !dir.exists()) dir.mkdir()
+// val name = t.table.name
+ s.foreach(t => tableFiles(t).foreach(f => {
+ val out = Some(s"$tPath/${t.info.name}${f._3}.scala")
+ CodeFile(f._1, tableTempPath(f._2), out).writeToFile(generate)
+ }))
+ })
+
+ // delete temp dir
+ Seq(tableTempPath("CaseClass"), tableTempPath("PlainQueryObject"), tableTempPath("TableClass")).foreach(tp => {
+ val temp = new java.io.File(tp)
+ temp.delete()
+ })
+ val dir = new java.io.File(tempDir)
+ println("delete:", dir.delete())
+
+ // frontend
+ writeToFile(s"${paths.frontendPath}/config/objectProperties.json", objectPropertiesJSON, generate)
+ writeToFile(s"${paths.frontendPath}/config/collectionsData.json", collectionDataJSON, generate)
+ writeToFile(s"${paths.frontendPath}/config/settings.json", settingsJSON, generate)
}
- def fileDbCode: String = {
- val importTablePackages = tables
- .map(t => s"import ${t.packageString}.{${t.plainQueryObject}, ${t.tableClass}}").mkString(",\n")
- val getQueryMatches = tables
- .map(t =>
- s""" case ("${t.tableObject}", true) => ${t.plainQueryObject}.get(rp)
- | case ("${t.tableObject}", false) => tb${t.tableName}.dynamicQueryResults(rp).result"""
- .stripMargin).mkString("\n")
- val countQueryMatches = tables
- .map(t =>
- s""" case ("${t.tableObject}", true) => ${t.plainQueryObject}.count(qp)
- | case ("${t.tableObject}", false) => tb${t.tableName}.dynamicQueryCount(qp).length.result"""
- .stripMargin).mkString("\n")
- scala.io.Source.fromFile(s"${paths.dbPackagePath}/ZooDb.scala").mkString
- .replaceFirst("//importTablePackages", importTablePackages)
- .replaceFirst("//tableObjects", tableObjects)
- .replaceFirst("//getQueryMatches", getQueryMatches)
- .replaceFirst("//countQueryMatches", countQueryMatches)
- .replaceFirst("%jdbc%", dbInfo.jdbc)
- .replaceFirst("%user%", dbInfo.user)
- .replaceFirst("%pass%", dbInfo.pass)
+ // helpers
+
+ private def tablePackagePrefix: String = prefix
+ private def tablePackagePath(name: String) = s"${paths.dbPackagePath}/$tablePackagePrefix$name"
+ private def allTableCodes: Seq[TableCode] = tables.flatten
+ private def nonViewTableCodes: Seq[TableCode] = tables.map(_.head)
+
+ private def tableObjects(seq: Seq[TableCode]) = seq.map(_.dbTableObject).mkString("\n")
+
+ // backend code
+
+ def zooCreateRepl: Map[String, String] = Map(
+ "//tableObjects" -> tableObjects(nonViewTableCodes),
+ "//views" -> viewCreation.map(_._2).mkString("\n"),
+ "//importTablePackages" -> nonViewTableCodes.map(_.tableClassImport).mkString("\n"),
+ "//schemaCreateList" -> nonViewTableCodes.map(_.zooSchemaCreate).mkString(", "),
+ "//viewCreateList" -> viewCreation.map(_._1).mkString(", ")
+ )
+
+ private def jsonSupportRepl: Map[String, String] = Map(
+ "//importTablePackages" -> allTableCodes.map(_.jsonSupportImport).mkString("\n"),
+ "//casesToJson" -> allTableCodes.map(_.jsonSupportMap).mkString("\n")
+ )
+
+
+ private def zooDbRepl: Map[String, String] = Map(
+ "//tableObjects" -> tableObjects(allTableCodes),
+ "%jdbc%" -> dbInfo.jdbc,
+ "%user%" -> dbInfo.user,
+ "%pass%" -> dbInfo.pass,
+ "//importTablePackages" -> allTableCodes.map(_.zooDbImport).mkString("\n"),
+ "//getQueryMatches" -> allTableCodes.map(_.dbGetQueryMatches).mkString("\n"),
+ "//countQueryMatches" -> allTableCodes.map(_.dbCountQueryMatches).mkString("\n")
+ )
+
+ // frontend jsons
+
+ private def objectPropertiesJSON: String = {
+ val code = allTableCodes.map(_.jsonObjectProperties).mkString(",\n")
+ s"""{
+ |$code
+ |}
+ """.stripMargin
}
- // react
+ private def collectionDataJSON: String = {
+ val code = allTableCodes.map(_.collectionsData).mkString(",\n")
+ s"""{
+ |$code
+ |}
+ """.stripMargin
+ }
- def objectPropertiesJSON: String = {
- val code = tables.map(_.jsonObjectProperties).mkString(",\n")
+ private def settingsJSON: String = {
+ val code = allTableCodes.map(_.defaultColumns).mkString(",\n")
s"""{
+ | "title": "Search",
+ | "defaultColumns": {
|$code
+ | }
|}
""".stripMargin
}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/JoinCode.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/JoinCode.scala
new file mode 100644
index 0000000000..404a2a171e
--- /dev/null
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/JoinCode.scala
@@ -0,0 +1,11 @@
+package info.kwarc.mmt.sql.codegen
+
+case class JoinCode(tbInfo: TableInfo, columnName: String) extends CodeHelper {
+
+ def fkMethod: String = {
+ val camelName = camelCase(columnName)
+ val fk = s""" def fk${tbInfo.name} = foreignKey("${tbInfo.dbTableName}_FK", $camelName, ${tbInfo.tableObject})"""
+ s"""$fk(_.ID, onUpdate=ForeignKeyAction.Restrict, onDelete=ForeignKeyAction.Cascade)"""
+ }
+
+}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableCode.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableCode.scala
index e92c287799..7c607b86f4 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableCode.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableCode.scala
@@ -1,110 +1,68 @@
package info.kwarc.mmt.sql.codegen
-import info.kwarc.mmt.sql.{Column, Table}
-
-case class TableCode(prefix: String, table: Table) {
-
- def tableName: String = table.name
- def tableObject: String = s"tb$tableName"
- def packageString = s"xyz.discretezoo.web.db.$prefix$tableName" // package for the table specific files
- // class names
- def plainQueryObject = s"${tableName}PlainQuery"
- def tableClass = s"${tableName}Table"
- def caseClass: String = table.name
-
- private def dbTableName = s"${prefix}_${tableName.toUpperCase}" // table name in the database
-
- private def columnCodeList: Seq[ColumnCode] = table.columns.map(ColumnCode)
-
-// TODO def inCollectionMap: String = collections.map(_.inCollectionItem).mkString(",\n")
-
- def codeTableClass: String = {
- val accessorMethods = columnCodeList.map(_.accessorMethod).mkString("\n")
- val caseClassMapParameters = columnCodeList.map(_.nameCamelCase).mkString(" ::\n")
- val selectMap = columnCodeList.map(_.selectMapItem).mkString(",\n")
-
- s"""package $packageString
- |import java.util.UUID
- |import slick.collection.heterogeneous.HNil
- |import slick.lifted.{ProvenShape, Rep}
- |import xyz.discretezoo.web.DynamicSupport.ColumnSelector
- |import xyz.discretezoo.web.ZooPostgresProfile.api._
- |
- |final class $tableClass(tag: Tag) extends Table[$caseClass](tag, "$dbTableName") with ColumnSelector {
- |
- |def ID: Rep[UUID] = column[UUID]("ID", O.PrimaryKey)
- |$accessorMethods
- |
- |def * : ProvenShape[$caseClass] = (
- |ID ::
- |$caseClassMapParameters ::
- |HNil
- |).mapTo[$caseClass]
- |
- |val select: Map[String, Rep[_]] = Map(
- |$selectMap
- |)
- |
- |val inCollection: Map[String, Rep[Boolean]] = Map(
- |"ID" -> true
- |)
- |
- |}""".stripMargin
- }
-
- def codeCaseClass: String = {
- val cols = columnCodeList.map(_.caseClassField).mkString(",\n")
- val selectMap = columnCodeList.map(_.selectMapItem).mkString(",\n")
-
- s"""package $packageString
- |import java.util.UUID
- |import xyz.discretezoo.web.ZooObject
- |
- |case class $caseClass(
- |ID: UUID,
- |$cols) extends ZooObject {
- |
- |def select: Map[String, _] = Map(
- |$selectMap
- |)
- |
- |}""".stripMargin
- }
-
- def codePlainQueryObject: String = {
- val getResultParameters = table.columns.map(c => {
- val special = Seq("UUID", "List[Int]")
- val column = ColumnCode(c)
- column.typeString match {
- case "UUID" => "r.nextObject.asInstanceOf[UUID]"
- case "List[Int]" => "r.<<[Seq[Int]].toList"
- case _ => "r.<<"
- }
- }).mkString(", ")
-
- s"""package $packageString
- |import java.util.UUID
- |import slick.jdbc.GetResult
- |import xyz.discretezoo.web.PlainSQLSupport
- |import xyz.discretezoo.web.ZooPostgresProfile.api._
- |
- |object $plainQueryObject extends PlainSQLSupport[$caseClass] {
- |
- |override val tableName: String = "$dbTableName"
- |override implicit val getResult: GetResult[$caseClass] = GetResult(r => $caseClass(r.nextObject.asInstanceOf[UUID], $getResultParameters))
- |
- |val inCollection: Map[String, String] = Map(
- |"$tableName" -> "ID"
- |)
- |
- |}""".stripMargin
- }
-
+case class TableCode( info: TableInfo,
+ dbPackagePath: String,
+ columns: Seq[ColumnCode],
+ datasetName: String,
+ joins: Seq[TableInfo]
+) extends CodeHelper {
+
+ private val baseRepl: Map[String, String] = Map(
+ "//package" -> s"package ${info.packageString}",
+ "%caseClass%" -> info.caseClass
+ )
+
+ // CaseClass
+ def caseClassRepl: Map[String, String] = baseRepl ++ Map(
+ "//cols" -> columns.map(_.caseClassField).mkString(",\n"),
+ "//selectMap" -> columns.map(_.selectMapCaseClass).mkString(",\n")
+ )
+
+ // PlainQueryObject
+ private def plainQueryObject: String = s"${info.name}PlainQuery"
+ def plainQueryRepl: Map[String, String] = baseRepl ++ Map(
+ "%plainQueryObject%" -> plainQueryObject,
+ "%tableName%" -> info.name,
+ "%getResultParameters%" -> columns.map(_.getResultItem).mkString(", "),
+ "%sqlFrom%" -> info.dbTableName, // table name in the database
+ "//columns" -> (quoted("ID") +: columns.filter(_.join.isEmpty).map(_.nameDbQuoted)).map(c => s""" |${quoted(info.dbTableName)}.$c""").mkString(",\n")
+ )
+
+ // TableClass
+ def tableClassRepl: Map[String, String] = baseRepl ++ Map(
+ "//joinImports" -> joins.map(_.joinImport).mkString("\n"),
+ "//joinQueries" -> joins.map(_.joinQuery).mkString("\n"),
+ "//accessorMethods" -> columns.map(_.accessorMethod).mkString("\n"),
+ "//caseClassMapParameters" -> columns.map(_.nameCamelCase).mkString(" ::\n"),
+ "//selectMap" -> columns.map(_.selectMapTableClass).mkString(",\n"),
+ "%dbTableName%" -> info.dbTableName, // table name in the database
+ "%tableClass%" -> info.tableClass
+ )
+
+ // Create
+ def tableClassImport: String = s"import ${info.packageString}.${info.tableClass}"
+ def zooSchemaCreate: String = s"${info.tableObject}.schema.createIfNotExists"
+
+ // ZooDb
+ def zooDbImport: String = s"import ${info.packageString}.{$plainQueryObject, ${info.tableClass}}"
+ def dbTableObject: String = s" object ${info.tableObject} extends TableQuery(new ${info.tableClass}(_))"
+ def dbCountQueryMatches: String =
+ s""" case ("${info.tableObject}", true) => $plainQueryObject.count(qp)
+ | case ("${info.tableObject}", false) => tb${info.name}.dynamicQueryCount(qp).length.result""".stripMargin
+
+ def dbGetQueryMatches: String =
+ s""" case ("${info.tableObject}", true) => $plainQueryObject.get(rp)
+ | case ("${info.tableObject}", false) => tb${info.name}.dynamicQueryResults(rp).result""".stripMargin
+
+ // TODO def inCollectionMap: String = collections.map(_.inCollectionItem).mkString(",\n")
+
+ // JsonSupport
+ def jsonSupportImport: String = s"import ${info.packageString}.${info.caseClass}"
def jsonSupportMap: String = {
- val columns = columnCodeList.map(c => s" ${c.jsonWriterMapItem}").mkString(",\n")
- s"""case o: $caseClass => JsObject(
+ val cols = columns.map(c => s" ${c.jsonWriterMapItem}").mkString(",\n")
+ s"""case o: ${info.caseClass} => JsObject(
| List(
- |$columns
+ |$cols
| ).flatten: _*
| )""".stripMargin
}
@@ -112,22 +70,48 @@ case class TableCode(prefix: String, table: Table) {
// from here on react stuff
def jsonObjectProperties: String = {
- val columns = columnCodeList.map(_.jsonObjectProperties).mkString(",\n")
- s""""$tableObject": {
- |$columns
+ val cols = columns.map(_.jsonObjectProperties).mkString(",\n")
+ s""""${info.tableObject}": {
+ |$cols
|}
""".stripMargin
}
def collectionsData: String = {
- val columns = columnCodeList.map(_.jsonObjectProperties).mkString(",\n")
- s""""$tableObject": {
- | $tableName: {
- | "id": "$tableName",
- | "name": "Table description"
+ s""""${info.tableObject}": {
+ | "${info.name}": {
+ | "id": "${info.name}",
+ | "name": "$datasetName"
| }
|}
""".stripMargin
}
+ def defaultColumns: String = {
+ val cols = columns.filter(_.isDisplayedByDefault).map(c => s""""${c.name}"""").mkString(", ")
+ s""" "${info.tableObject}": [$cols]""".stripMargin
+ }
+
+ def dbColumns(withID: Boolean): Seq[String] = {
+ val maybeID = if (withID) Seq(quoted("ID")) else Seq()
+ (maybeID ++ columns.filter(_.join.isEmpty).map(_.nameDbQuoted)).map(c => s"${quoted(info.dbTableName)}.$c")
+ }
+
+ def dbJoinSQL: String = {
+ columns.collect({
+ case ColumnCode(c, Some(j)) => (c, j)
+ }).map(t => {
+ val tbName = quoted(t._2.tbInfo.dbTableName)
+ s"""JOIN $tbName ON $tbName."ID" = ${quoted(info.dbTableName)}.${columnNameDB(t._1)}"""
+ }).mkString("\n")
+ }
+
+ override def toString: String =
+ s"""TableCode: ${info.prefix} ${info.name} ($dbPackagePath) "$datasetName"
+ |#columns:
+ |${columns.map(_.toString).mkString("\n")}
+ |#joins:
+ |${joins.map(t => s"${t.prefix} ${t.name}").mkString("\n")}
+ """.stripMargin
+
}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableInfo.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableInfo.scala
new file mode 100644
index 0000000000..a4a5c221b7
--- /dev/null
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/TableInfo.scala
@@ -0,0 +1,23 @@
+package info.kwarc.mmt.sql.codegen
+
+case class TableInfo(prefix: String, name: String) {
+
+ def tablePackageName: String = prefix + name
+ def dbTableName: String = s"${prefix}_${name.toUpperCase}"
+ def viewName = s"${name}View"
+ def dbViewName: String = s"${prefix}_${viewName.toUpperCase}"
+ def packageString = s"xyz.discretezoo.web.db.$tablePackageName" // package for the table specific files
+
+ def caseClass: String = name
+ def tableObject: String = s"tb$name"
+ def viewObject: String = s"view$name"
+ def tableClass: String = s"${name}Table"
+
+ def joinImport: String = s"import $packageString.$tableClass"
+ def joinQuery: String = s" val $tableObject = TableQuery[$tableClass]"
+
+ def dbObject(name: String): String = {
+ s"object $tableObject extends TableQuery(new $tableClass(_))"
+ }
+
+}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/Tables.scala b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/Tables.scala
new file mode 100644
index 0000000000..23ae5dea98
--- /dev/null
+++ b/src/mmt-lf/src/info/kwarc/mmt/sql/codegen/Tables.scala
@@ -0,0 +1,96 @@
+package info.kwarc.mmt.sql.codegen
+
+import info.kwarc.mmt.api.MPath
+import info.kwarc.mmt.sql.Table
+
+import scala.collection.mutable
+
+class Tables(prefix: String, dirPaths: ProjectPaths, readerCallback: MPath => Any, jdbcInfo: JDBCInfo, inputTheories: Seq[MPath] = Seq()) extends CodeHelper {
+
+ // Builder states per MPath: None = retrieval not yet attempted;
+ // Some(None) = retrieved, not a table; Some(Some(t)) = retrieved table t
+ private val dbPackagePath: String = dirPaths.dbPackagePath
+ private val builder: mutable.Map[MPath, Option[Option[TableCode]]] = mutable.Map[MPath, Option[Option[TableCode]]]()
+ private def inputTableCodes: Seq[TableCode] = inputTheories.map(builder).collect({
+ case Some(Some(t: TableCode)) => t
+ })
+
+ val tableMap: Map[TableInfo, TableCode] = processTables()
+ val viewMap: Map[TableInfo, TableCode] = processViews()
+ val views: Seq[(String, String)] = tableMap.values.filter(_.joins.nonEmpty).map(generateView).toSeq
+
+ def databaseCode: DatabaseCode = {
+ val tables = tableMap.map(t => Seq(t._2) ++ viewMap.get(t._2.info)).toSeq
+ DatabaseCode(prefix, dirPaths, tables, jdbcInfo, views)
+ }
+
+ def printBuilder(): Unit = builder.foreach({
+ case (p, None) => println(s"Unchecked: $p")
+ case (p, Some(None)) => println(s"Not table: $p")
+ case (p, Some(Some(t))) => println(s"Table: ${t.info.name}")
+ })
+
+ private def generateView(t: TableCode): (String, String) = {
+ val joins = t.joins
+ val columnList = t.dbColumns(true) ++ joins.flatMap(j => tableMap(j).dbColumns(false))
+ val viewSQL = s""" val ${t.info.viewObject}: DBIO[Int] =
+ | sqlu$quotes
+ | CREATE VIEW ${quoted(t.info.dbViewName)} AS
+ | SELECT ${columnList.mkString(", ")}
+ | FROM ${quoted(t.info.dbTableName)}
+ | ${t.dbJoinSQL};
+ | $quotes
+ """.stripMargin
+ (t.info.viewObject, viewSQL)
+ }
+
+ private def processTables(): Map[TableInfo, TableCode] = {
+ if (inputTheories.nonEmpty) inputTheories.foreach(addPath)
+ while (unprocessed.nonEmpty) {
+ unprocessed.foreach(path => {
+ readerCallback(path) match {
+ case table: Table =>
+ table.includes.foreach(addPath)
+ processPath(path, Some(getTableCode(table)))
+ case _ => processPath(path, None)
+ }
+ })
+ }
+ builder.collect({ case (_, Some(Some(t: TableCode))) => (t.info, t) }).toMap
+ }
+
+ private def processViews(): Map[TableInfo, TableCode] = {
+ inputTableCodes.filter(_.joins.nonEmpty).map(t => {
+ val view = getView(t.info)
+ (t.info, view)
+ }).toMap
+ }
+
+ // assume that the column names are unique
+ private def getView(t: TableInfo): TableCode = {
+ val tbCode = tableMap(t)
+ def getColumns(t: TableInfo) = tableMap(t).columns.filter(_.join.isEmpty)
+ val cols = getColumns(t) ++ tbCode.joins.flatMap(getColumns)
+ TableCode(TableInfo(t.prefix, t.viewName), dbPackagePath, cols, tbCode.datasetName, Seq())
+ }
+
+ private def getTableCode(table: Table): TableCode = {
+ val joins = table.columns.map(c => (c, c.foreignKey))
+ .collect({ case (c, Some(p: MPath)) => (c, readerCallback(p))})
+ .collect({ case (c, t: Table) => JoinCode(TableInfo(prefix, t.name), c.name) })
+ val cols = table.columns.map(c => ColumnCode(c, joins.find(_.columnName == c.name)))
+ TableCode(TableInfo(prefix, table.name), dbPackagePath, cols, table.datasetName.getOrElse(""), joins.map(_.tbInfo))
+ }
+
+ private def addPath(p: MPath): Unit = {
+ if (!builder.contains(p)) builder += (p -> None)
+ }
+
+ private def processPath(p: MPath, t: Option[TableCode]): Unit = {
+ if (builder.contains(p)) builder -= p
+ builder += (p -> Some(t))
+ }
+
+ private def unprocessed: Seq[MPath] = builder.collect({ case (p, None) => p }).toSeq
+
+}
diff --git a/src/mmt-lf/src/info/kwarc/mmt/test/DiagTest.scala b/src/mmt-lf/src/info/kwarc/mmt/test/DiagTest.scala
index d81b8941b6..0533713e01 100644
--- a/src/mmt-lf/src/info/kwarc/mmt/test/DiagTest.scala
+++ b/src/mmt-lf/src/info/kwarc/mmt/test/DiagTest.scala
@@ -6,6 +6,6 @@ object DiagTest extends MMTIntegrationTest()(
ExtensionSpec("info.kwarc.mmt.lf.Plugin")
) {
def main(): Unit = {
- shouldCheck("MMT/mathscheme", "Example.mmt")()
+ shouldCheck("MMT/Mathscheme", "Example.mmt")()
}
}
diff --git a/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/JSONImporter.scala b/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/JSONImporter.scala
index c4a45ff30e..c4abed43f5 100644
--- a/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/JSONImporter.scala
+++ b/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/JSONImporter.scala
@@ -20,7 +20,7 @@ sealed abstract class ParsedObject {
case class ParsedMethod(op : String, filters : List[List[String]], comment : String, rank : Int) {
val dependencies = op :: filters.flatten.distinct
}
-case class ParsedProperty(name : String, aka : List[String], implied : List[String], isTrue :Boolean = false,location : (String,Int)) extends ParsedObject {
+case class ParsedProperty(name : String, aka : List[String], implied : List[String], isTrue :Boolean = false,location : (String,Int),methods:List[ParsedMethod]) extends ParsedObject {
val dependencies = implied
val tp = "GAPProperty"
}
@@ -33,7 +33,7 @@ case class ParsedCategory(name : String, aka : List[String], implied : List[Stri
val dependencies = implied
val tp = "GAPCategory"
}
-case class ParsedAttribute(name : String, aka : List[String], filters : List[String],location : (String,Int)) extends ParsedObject {
+case class ParsedAttribute(name : String, aka : List[String], filters : List[String],location : (String,Int),methods:List[ParsedMethod]) extends ParsedObject {
val dependencies = filters
val tp = "GAPAttribute"
}
@@ -46,6 +46,11 @@ case class ParsedFilter(name : String, aka : List[String], implied : List[String
val tp = "GAPFilter"
}
+case class ParsedConstructor(name : String, aka : List[String],location : (String,Int),filters: List[String],methods:List[ParsedMethod]) extends ParsedObject {
+ override val dependencies: List[String] = filters
+ val tp = "Constructor"
+}
+
case class ParsedDefinedFilter(name : String, aka : List[String], conjof : List[String],location : (String,Int)) extends ParsedObject {
val dependencies = conjof
val tp = "GAPDefinedFilter"
@@ -153,11 +158,13 @@ class GAPJSONImporter extends Importer {
*/
- val locations = {
+ val locations = try {
val locobj = try { obj.getAsList(classOf[JSONObject],"locations").head } catch {
case e : Exception => obj.getAs(classOf[JSONObject],"location")
}
- (locobj.getAsString("file"),locobj.getAsInt("line").toInt)
+ (locobj.getAsString("file"),Try(locobj.getAsInt("line").toInt).getOrElse(0))
+ } catch {
+ case ParseError(_) => ("",0)
}
// TODO: don't just take the head!
@@ -165,12 +172,14 @@ class GAPJSONImporter extends Importer {
case "GAP_AndFilter" =>
val conjs = obj.getAsList(classOf[String],"conjunction_of").filter(redfilter(name))
(ParsedDefinedFilter(name,aka,conjs,locations),List("conjunction_of"))
- case "GAP_Property" | "GAP_TrueProperty" =>
+ case "GAP_Property" | "GAP_TrueProperty"| "Property" =>
val impls = Try(obj.getAsList(classOf[String],"implied")).getOrElse(
obj.getAsList(classOf[String],"filters")).filter(redfilter(name))
- (ParsedProperty(name,aka,impls,if (tp == "GAP_TrueProperty") true else false,locations),List("implied","filters"))
+ val methods = doMethods(obj.getAs(classOf[JSONObject],"methods"),name)
+ allmethods :::= methods
+ (ParsedProperty(name,aka,impls,if (tp == "GAP_TrueProperty") true else false,locations,methods),List("implied","filters","methods"))
- case "GAP_Operation" =>
+ case "GAP_Operation"| "Operation" =>
val filters = obj.getAsList(classOf[JSONArray],"filters") map (_.values.toList map {
case JSONArray(ls@_*) => (ls.toList map {
case JSONString(s) => s
@@ -182,9 +191,11 @@ class GAPJSONImporter extends Importer {
allmethods :::= methods
(ParsedOperation(name,aka,filters,methods,locations),List("filters","methods"))
- case "GAP_Attribute" =>
+ case "GAP_Attribute"| "Attribute" =>
val filters = obj.getAsList(classOf[String],"filters").filter(redfilter(name))
- (ParsedAttribute(name,aka,filters,locations),List("filters"))
+ val methods = doMethods(obj.getAs(classOf[JSONObject],"methods"),name)
+ allmethods :::= methods
+ (ParsedAttribute(name,aka,filters,locations,methods),List("filters","methods"))
case "GAP_Representation" =>
val implied = obj.getAsList(classOf[String],"implied").filter(redfilter(name))
@@ -198,10 +209,19 @@ class GAPJSONImporter extends Importer {
val implied = obj.getAsList(classOf[String],"implied").filter(redfilter(name))
(ParsedFilter(name,aka,implied,locations),List("implied"))
+ case "Constructor" =>
+ val filters = obj.getAsList(classOf[String],"filters").filter(redfilter(name))
+ val methods = doMethods(obj.getAs(classOf[JSONObject],"methods"),name)
+ allmethods :::= methods
+ (ParsedConstructor(name,aka,locations,filters,methods),List("filters","methods"))
+
+ case "Function" =>
+ // TODO?
+ return
case _ => throw new ParseError("Type not yet implemented: " + tp + " in " + obj)
}
val missings = obj.map.filter(p => !("name" :: "type" :: "aka" :: "location" :: "locations" :: eaten).contains(p._1.value))
- if (missings.nonEmpty) throw new ParseError("Type " + tp + " has additional fields " + missings)
+ if (missings.nonEmpty) println("Type " + tp + " has additional fields " + missings) // throw new ParseError("Type " + tp + " has additional fields " + missings)
// log("Added: " + ret)
all ::= ret
}
@@ -247,9 +267,9 @@ class GAPJSONImporter extends Importer {
new DeclaredFilter(LocalName(name),implied map deps,loc)
case ParsedRepresentation(name,aka,implied,loc) =>
new GAPRepresentation(LocalName(name),implied map deps,loc)
- case ParsedProperty(name,aka,implied,istrue,loc) =>
+ case ParsedProperty(name,aka,implied,istrue,loc,methods) =>
new DeclaredProperty(LocalName(name),implied map deps,istrue,loc)
- case ParsedAttribute(name,aka,filters,loc) =>
+ case ParsedAttribute(name,aka,filters,loc,methods) =>
new DeclaredAttribute(LocalName(name), filters map deps,loc)
case ParsedCategory(name,aka,implied,locations) =>
new DeclaredCategory(LocalName(name), implied map deps,locations)
@@ -257,6 +277,8 @@ class GAPJSONImporter extends Importer {
new DeclaredOperation(LocalName(name),filters map (_ map (_ map deps)),locations)
case ParsedDefinedFilter(name,aka,conjof,location) =>
new DefinedFilter(LocalName(name), conjof map deps,location)
+ case ParsedConstructor(name, aka, location, filters, methods) =>
+ new Constructor(LocalName(name),filters map deps,location)
}
dones += ((po,ret))
ret
@@ -438,8 +460,10 @@ class DeclaredFilter(val name : LocalName, val implied : List[GAPObject], val lo
class DefinedFilter(val name : LocalName, val conjs : List[GAPObject], val locations : (String,Int)) extends DeclaredObject with GAPFilter {
override def toString = "DefinedFilter " + name + " " + locations
val dependencies = conjs.flatMap(_.getInner).distinct
- def defi : Term = if (conjs.length == 1) Translator.objtotype(conjs.head) else
- conjs.init.foldRight(Translator.objtotype(conjs.last))((o,t) => GAP.termconj(Translator.objtotype(o),t))
+ def defi : Option[Term] = if (conjs.isEmpty) {
+ None
+ } else if (conjs.length == 1) Some(Translator.objtotype(conjs.head)) else
+ Some(conjs.init.foldRight(Translator.objtotype(conjs.last))((o,t) => GAP.termconj(Translator.objtotype(o),t)))
}
trait GAPCategory extends GAPFilter
@@ -470,6 +494,12 @@ class DeclaredAttribute(val name : LocalName, val filters: List[GAPObject], val
override def toString = "DeclaredAttribute " + name + " " + locations
val dependencies = filters.flatMap(_.getInner).distinct
}
+class Constructor (val name : LocalName, val filters: List[GAPObject], val locations : (String,Int))
+ extends DeclaredObject {
+ override def toString: String = "Constructor " + name + " " + locations
+ override val dependencies: List[DeclaredObject] = filters.flatMap(_.getInner).distinct
+ val returntype : Option[GAPObject] = None
+}
trait GAPProperty extends GAPAttribute
class DeclaredProperty(name : LocalName, implied : List[GAPObject], val istrue : Boolean, locations : (String,Int))
extends DeclaredAttribute(name,implied,locations) with GAPProperty {
diff --git a/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/Translator.scala b/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/Translator.scala
index 1592238824..663dddd4bb 100644
--- a/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/Translator.scala
+++ b/src/mmt-odk/src/info/kwarc/mmt/odk/GAP/Translator.scala
@@ -65,7 +65,7 @@ class Translator(controller: Controller, bt: BuildTask, index: Document => Unit,
obj.dependencies.foreach(doObject)
val (deps,cs) = obj match {
case df : DefinedFilter =>
- var consts = List(Constant(OMMOD(df.path.module),df.name,Nil,Some(GAP.filter),Some(df.defi),None))
+ var consts = List(Constant(OMMOD(df.path.module),df.name,Nil,Some(GAP.filter),df.defi,None))
addDependencies(df.path.module,df.dependencies)
(IsFilter(consts.last.path) :: doImpls(consts.head,df.dependencies),consts)
@@ -86,6 +86,17 @@ class Translator(controller: Controller, bt: BuildTask, index: Document => Unit,
}
addDependencies(a.path.module,obj.dependencies)
(IsAttribute(consts.last.path) :: doImpls(consts.head,a.dependencies),consts)
+ case a : Constructor =>
+ val filters = a.filters
+ var consts = List(Constant(OMMOD(a.path.module),a.name,Nil,Some(Arrow(GAP.obj,GAP.obj)),None,None))
+ //addConstant(c)
+ if (a.returntype.isDefined) {
+ consts ::= Constant(OMMOD(a.path.module),LocalName(a.name + "_type"),Nil,Some(
+ typeax(a,List(filters),a.returntype.get)
+ ),None,None)
+ }
+ addDependencies(a.path.module,obj.dependencies)
+ (doImpls(consts.head,a.dependencies),consts)
case op : DeclaredOperation =>
opcounter = 0
val filters = op.filters
diff --git a/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXBuildTarget.scala b/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXBuildTarget.scala
index 8ad850e5aa..276d4302ae 100644
--- a/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXBuildTarget.scala
+++ b/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXBuildTarget.scala
@@ -19,19 +19,21 @@ import scala.concurrent.duration._
import scala.sys.process.{ProcessBuilder, ProcessLogger}
/** common code for sms, latexml und pdf generation */
-abstract class LaTeXBuildTarget extends TraversingBuildTarget with STeXAnalysis with BuildTargetArguments {
- val localpathsFile = "localpaths.tex"
- val inDim = source
- var pipeOutput: Boolean = false
- val pipeOutputOption: String = "pipe-worker-output"
+abstract class LaTeXBuildTarget extends TraversingBuildTarget with STeXAnalysis with BuildTargetArguments
+{
+ val localpathsFile : String = "localpaths.tex"
+ val inDim : ArchiveDimension = source
+ var pipeOutput : Boolean = false
+ val pipeOutputOption : String = "pipe-worker-output"
+
/** timout in seconds */
- private val timeoutDefault: Int = 300
- protected var timeoutVal: Int = timeoutDefault
- protected val timeoutOption: String = "timeout"
- protected var nameOfExecutable: String = ""
+ private val timeoutDefault : Int = 300
+ protected var timeoutVal : Int = timeoutDefault
+ protected val timeoutOption : String = "timeout"
+ protected var nameOfExecutable : String = ""
protected case class LatexError(s: String, l: String) extends ExtensionError(key, s) {
- override val extraMessage = l
+ override val extraMessage : String = l
}
protected def commonOpts: OptionDescrs = List(
@@ -46,9 +48,9 @@ abstract class LaTeXBuildTarget extends TraversingBuildTarget with STeXAnalysis
override def start(args: List[String]) {
anaStartArgs(args)
pipeOutput = optionsMap.get(pipeOutputOption).isDefined
- optionsMap.get(timeoutOption).foreach { case v => timeoutVal = v.getIntVal }
- optionsMap.get(key).foreach { case v => nameOfExecutable = v.getStringVal }
- optionsMap.get("execute").foreach { case v =>
+ optionsMap.get(timeoutOption).foreach(v => timeoutVal = v.getIntVal)
+ optionsMap.get(key).foreach(v => nameOfExecutable = v.getStringVal)
+ optionsMap.get("execute").foreach { v =>
if (nameOfExecutable.isEmpty) nameOfExecutable = v.getStringVal
else logError("executable already set by: --" + key + "=" + nameOfExecutable)
}
@@ -199,7 +201,7 @@ abstract class LaTeXDirTarget extends LaTeXBuildTarget {
val outDim: ArchiveDimension = source
override val outExt = "tex"
- override def getFolderOutFile(a: Archive, inPath: FilePath) = a / outDim / inPath
+ override def getFolderOutFile(a: Archive, inPath: FilePath) : File = a / outDim / inPath
// we do nothing for single files
def reallyBuildFile(bt: BuildTask): BuildResult = BuildEmpty("nothing to do for files")
@@ -251,7 +253,7 @@ abstract class LaTeXDirTarget extends LaTeXBuildTarget {
override def buildDepsFirst(a: Archive, up: Update, in: FilePath = EmptyPath) {
a.traverse[Unit](inDim, in, TraverseMode(includeFile, includeDir, parallel))({
- case _ =>
+ _ =>
}, {
case (c@Current(inDir, inPath), _) =>
buildDir(a, inPath, inDir, force = false)
diff --git a/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXML.scala b/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXML.scala
index 4e4ac853a6..a07f059690 100644
--- a/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXML.scala
+++ b/src/mmt-stex/src/info/kwarc/mmt/stex/LaTeXML.scala
@@ -19,7 +19,7 @@ class AllPdf extends LaTeXDirTarget {
BuildResult.empty
}
- override def estimateResult(bt: BuildTask) = {
+ override def estimateResult(bt: BuildTask): BuildSuccess = {
if (bt.isDir) {
val a = bt.archive
val ls = getAllFiles(bt).map(f => FileBuildDependency("pdflatex", a, bt.inPath / f))
@@ -259,7 +259,7 @@ class LaTeXML extends LaTeXBuildTarget {
var optLevel: Option[Level.Level] = None
var msg: List[String] = Nil
var newMsg = true
- var region = SourceRegion.none
+ var region : SourceRegion = SourceRegion.none
var phase = 1
def phaseToString(p: Int): String = "latexml-" + (p match {
@@ -557,19 +557,27 @@ class PdfLatex extends LaTeXBuildTarget {
}
}
-class TikzSvg extends PdfLatex {
- override val key = "tikzsvg"
- override val outExt = "svg"
- override val outDim = source
+class TikzSvg extends PdfLatex
+{
+ override val key : String = "tikzsvg"
+ override val outExt : String = "svg"
+ override val outDim : ArchiveDimension = content
override def includeDir(n: String): Boolean = n.endsWith("tikz")
- override def reallyBuildFile(bt: BuildTask): BuildResult = {
- val pdfFile = bt.inFile.setExtension("pdf")
- val svgFile = bt.inFile.setExtension("svg")
+ override def reallyBuildFile(bt: BuildTask): BuildResult =
+ {
+ // ToDo: This pdf is ~technically~ also generated content,
+ // suggesting it should live elsewhere. But a number
+ // of things assume it is a sibling of the inFile, so
+ // it's complicated. Link? Copy? Ignore?
+ val pdfFile : File = bt.inFile.setExtension("pdf")
+ val svgFile : File = bt.outFile
+
bt.outFile.delete()
createLocalPaths(bt)
val output = new StringBuffer()
+
try {
val exit = runPdflatex(bt, output)
if (exit != 0) {
@@ -581,6 +589,7 @@ class TikzSvg extends PdfLatex {
val exitConvert = timeout(pb, procLogger(output, pipeOutput = pipeOutput))
if (exitConvert == 0 && svgFile.length() > 0)
logSuccess(bt.outPath)
+
else {
bt.errorCont(LatexError(if (exitConvert != 0) "exit code " + exitConvert
else "no svg created", output.toString))
diff --git a/src/mmt-stex/src/info/kwarc/mmt/stex/STeXAnalysis.scala b/src/mmt-stex/src/info/kwarc/mmt/stex/STeXAnalysis.scala
index 2cb44d1108..e4656afa15 100644
--- a/src/mmt-stex/src/info/kwarc/mmt/stex/STeXAnalysis.scala
+++ b/src/mmt-stex/src/info/kwarc/mmt/stex/STeXAnalysis.scala
@@ -4,10 +4,19 @@ import info.kwarc.mmt.api.archives._
import info.kwarc.mmt.api.utils.{File, FilePath}
import info.kwarc.mmt.stex.STeXUtils._
-case class STeXStructure(smslines: List[String], deps: List[Dependency]) {
- def join(s2: STeXStructure): STeXStructure =
- if (s2.smslines.isEmpty && s2.deps.isEmpty) this
- else STeXStructure(s2.smslines ++ smslines, s2.deps ++ deps)
+/**
+ * Monoidal accumulator for SMS content and dependencies.
+ * @param smslines SMS content (List[String])
+ * @param deps Collected dependencies (List[Dependency])
+ */
+case class STeXStructure(smslines: List[String], deps: List[Dependency])
+{
+ def empty : STeXStructure = STeXStructure(List.empty,List.empty)
+
+ def <>(that : STeXStructure) : STeXStructure =
+ if (that.smslines.isEmpty && that.deps.isEmpty) {
+ this
+ } else STeXStructure(that.smslines ++ smslines, that.deps ++ deps)
}
/**
@@ -47,20 +56,79 @@ trait STeXAnalysis {
}
}
+ def mhReposClosure(archive: Archive, parents : Set[File], r : String, b : String) : List[Dependency] =
+ {
+ // Single dependency
+ val nu_dep : List[Dependency] = mhRepos(archive, r, b)
+
+ // Find distant archive given by r.
+ val distantArchive : Archive = Option(r).map(getArgMap) match
+ {
+ case Some(m) =>
+ // Compare mkDep
+ assert(m.get("mhrepos").isDefined)
+ val root = archive.root.up.up / m("mhrepos")
+ controller.addArchive(root)
+ val thearchive = controller.backend.getArchive(root)
+ assert(thearchive.isDefined)
+ thearchive.get
+
+ case None => archive // fallback
+ }
+
+ val theFile : File = if (archive == distantArchive) { entryToPath(b).toFile }
+ else { (distantArchive.root / "source" / b).setExtension("tex") }
+
+ val closure : List[Dependency] = getDeps(distantArchive,theFile,parents + theFile)
+ nu_dep ::: closure
+ }
+
protected def toKeyDep(d: Dependency, key: String): Dependency = d match {
case FileBuildDependency(_, ar, fp) => FileBuildDependency(key, ar, fp)
case fd => fd
}
- protected def matchPathAndRep(a: Archive, inFile: File, line: String, parents: Set[File]): List[Dependency] =
- line match {
- case beginModnl(_, _, b) => List(mkFileDep(a, entryToPath(b)))
+ protected def matchPathAndRepo(archive: Archive, inFile: File, line: String, parents: Set[File]) : List[Dependency] =
+ {
+ line match
+ {
+ case beginModnl(_, _, b) => List(mkFileDep(archive, entryToPath(b)))
+
+ // The next four cases take paths relative to the including file, so no repository argument is needed.
case input(_, _, _, b) =>
+ // As below with slight alterations for libinputs.
val p = if (line.startsWith("\\lib"))
- STeXUtils.getAmbleFile(b + ".tex", a, None)
+ STeXUtils.getAmbleFile(b + ".tex", archive, None)
else (inFile.up / b).setExtension("tex")
val d = PhysicalDependency(p)
- d :: (if (!parents.contains(p)) getDeps(a, p, parents + p) else Nil)
+ d :: (if (!parents.contains(p)) getDeps(archive, p, parents + p) else Nil)
+
+ case includeAssignment(_,_,b) =>
+ // "a13" -> File(a13.tex)
+ val pfile : File = (inFile.up / b).setExtension("tex")
+ // The including file naturally physically depends on the included file.
+ val pdep : Dependency = PhysicalDependency(pfile)
+ // All dependencies of the included file are now also dependencies of the including file.
+ if (!parents.contains(pfile)) {
+ pdep :: getDeps(archive, pfile, parents + pfile)
+ } else { List(pdep) }
+
+ case inputAssignment(_,_,b) =>
+ // As above.
+ val pfile : File = (inFile.up / b).setExtension("tex")
+ val pdep : Dependency = PhysicalDependency(pfile)
+ if (!parents.contains(pfile)) {
+ pdep :: getDeps(archive, pfile, parents + pfile)
+ } else { List(pdep) }
+
+ case includeProblem(_,_,b) =>
+ // As above.
+ val pfile : File = (inFile.up / b).setExtension("tex")
+ val pdep : Dependency = PhysicalDependency(pfile)
+ if (!parents.contains(pfile)) {
+ pdep :: getDeps(archive, pfile, parents + pfile)
+ } else { List(pdep) }
+
case includeGraphics(_, _, b) =>
val gExts = List("png", "jpg", "eps", "pdf")
val p = inFile.up / b
@@ -72,20 +140,30 @@ trait STeXAnalysis {
if (os.isEmpty) log(inFile + " misses graphics file: " + b)
os.toList.map(s => PhysicalDependency(p.setExtension(s)))
}
+
case importOrUseModule(r) =>
getArgMap(r).get("load").map(f => PhysicalDependency(File(f).setExtension(".sms"))).toList
+
case mhinputRef(_, r, b) =>
val fp = entryToPath(b)
+ val alldeps = getDeps(archive,fp.toFile,Set.empty)
Option(r) match {
- case Some(id) => mkDep(a, id, fp)
- case None => List(mkFileDep(a, fp))
+ case Some(id) => mkDep(archive, id, fp) ::: alldeps
+ case None => List(mkFileDep(archive, fp)) ::: alldeps
}
- case tikzinput(_, r, b) => mhRepos(a, r, b).map(toKeyDep(_, "tikzsvg"))
- case guse(r, b) => mkDep(a, r, entryToPath(b))
- case useMhProblem(_, r, b) => createMhImport(a, r, b).flatMap(_.deps).map(toKeyDep(_, "tikzsvg"))
- case includeMhProblem(_, r, b) => mhRepos(a, r, b)
+
+ case tikzinput(_, r, b) => mhRepos(archive, r, b).map(toKeyDep(_, key = "tikzsvg"))
+
+ case guse(r, b) => mkDep(archive, r, entryToPath(b))
+
+ // These three are supposed to work exactly alike, see https://github.com/UniFormal/MMT/issues/465.
+ case inputMhAssignment(_,r,b) => mhReposClosure(archive, parents, r, b)
+ case includeMhAssignment(_,r,b) => mhReposClosure(archive, parents, r, b)
+ case includeMhProblem(_, r, b) => mhReposClosure(archive, parents, r, b)
+
case _ => Nil
}
+ }
private def mkImport(a: Archive, r: String, p: String, s: String, ext: String): String =
"\\importmodule[load=" + a.root.up.up + "/" + r + "/source/" + p + ",ext=" + ext + "]" + s + "%"
@@ -129,11 +207,12 @@ trait STeXAnalysis {
private def createImport(r: String, p: String): STeXStructure =
STeXStructure(List("\\importmodule[" + r + "]{" + p + "}%"), Nil)
- /** create sms file */
- def createSms(a: Archive, inFile: File, outFile: File) {
- val smsLines = mkSTeXStructure(a, inFile, readSourceRebust(inFile).getLines, Set.empty).smslines
- if (smsLines.nonEmpty) File.write(outFile, smsLines.reverse.mkString("", "\n", "\n"))
- else log("no sms content")
+ /** Collect sms content and write to outFile. */
+ def createSms(a: Archive, inFile: File, outFile: File) : Unit = {
+ val smsContent = mkSTeXStructure(a, inFile, readSourceRebust(inFile).getLines, Set.empty).smslines
+
+ if (smsContent.isEmpty) {log("no sms content")}
+ else { File.write(outFile, smsContent.reverse.mkString("", "\n", "\n")) }
}
/** get dependencies */
@@ -144,47 +223,82 @@ trait STeXAnalysis {
else Nil
}
- /** in file is used for relative \input paths */
- def mkSTeXStructure(a: Archive, in: File, lines: Iterator[String], parents: Set[File]): STeXStructure = {
- var struct = STeXStructure(Nil, Nil)
- def join(s: STeXStructure) = {
- struct = struct.join(s)
+ /** Method for creating STeXStructures, used for generating sms content and finding dependencies.
+ * inFile is used for relative \input paths */
+ def mkSTeXStructure(a: Archive, in: File, lines: Iterator[String], parents: Set[File]): STeXStructure =
+ {
+ var localStruct = STeXStructure(Nil,Nil)
+
+ def combine(s: STeXStructure) : Unit = {
+ localStruct = localStruct <> s
}
lines.foreach { line =>
val l = stripComment(line).trim
- val verbIndex = l.indexOf("\\verb")
- if (verbIndex <= -1) {
+ val isVerbatim = l.contains("\\verb")
+ if (!isVerbatim)
+ {
val sl = matchSmsEntry(a, l)
- sl.foreach(join)
- if (key != "sms") {
- val od = matchPathAndRep(a, in, l, parents)
- od.foreach(d => join(STeXStructure(Nil, List(d))))
+ sl.foreach(combine)
+ if (key != "sms")
+ {
+ val od : List[Dependency] = matchPathAndRepo(a, in, l, parents)
+ // Forget all tikzsvg dependencies unless we're inside a latexml target.
+ val odp : List[Dependency] = if (key == "latexml") { od } else { od.filter {
+ case FileBuildDependency(akey, _, _) => akey != "tikzsvg"
+ case _ => true
+ }}
+ odp.foreach(d => combine(STeXStructure(Nil, List(d))))
}
}
}
- struct
+ localStruct
}
- def matchSmsEntry(a: Archive, line: String): List[STeXStructure] = {
- line match {
+ def matchSmsEntry(a: Archive, line: String): List[STeXStructure] =
+ {
+ line match
+ {
case importMhModule(r, b) =>
createMhImport(a, r, b)
+
+ case useMhModule(opt,_) =>
+ val argmap = getArgMap(opt)
+ if (argmap.contains("path")) {
+ val deps = mkDep(a,argmap.getOrElse("repos",archString(a)),entryToPath(argmap("path")))
+ assert(deps.length == 1)
+ List(STeXStructure(Nil,List(toKeyDep(deps.head,key = "sms"))))
+ } else { Nil }
+
case gimport(_, r, p) =>
List(createGImport(a, r, p))
+
+ case guse(opt,_) =>
+ val argmap = getArgMap(opt)
+ if (argmap.contains("path")) {
+ val deps = mkDep(a,argmap.getOrElse("repos",archString(a)),entryToPath(argmap("path")))
+ assert(deps.length == 1)
+ List(STeXStructure(Nil,List(toKeyDep(deps.head,key = "sms"))))
+ } else { Nil }
+
case smsGStruct(_, r, _, p) =>
List(createGImport(a, r, p))
+
case smsMhStruct(r, _, p) =>
createMhImport(a, r, p)
+
case smsSStruct(r, _, p) =>
List(createImport(r, p))
+
case smsViewsig(r, _, f, t) =>
val m = getArgMap(r)
val fr = m.getOrElse("fromrepos", archString(a))
val tr = m.getOrElse("torepos", archString(a))
List(mkGImport(a, fr, f), mkGImport(a, tr, t))
+
case smsViewnl(_, r, p) =>
List(createGImport(a, archString(a), p))
+
case smsMhView(r, _, f, t) =>
val m = getArgMap(r)
var ofp = m.get("frompath")
@@ -196,6 +310,7 @@ trait STeXAnalysis {
List(mkMhImport(a, fr, fp, f), mkMhImport(a, tr, tp, t))
case _ => Nil
}
+
case smsView(r, f, t) =>
val m = getArgMap(r)
val ofr = m.get("from")
@@ -205,7 +320,9 @@ trait STeXAnalysis {
List(createImport(fr, f), createImport(tr, t))
case _ => Nil
}
+
case _ if smsRegs.findFirstIn(line).isDefined => List(STeXStructure(List(line + "%"), Nil))
+
case _ => Nil
}
}
diff --git a/src/mmt-stex/src/info/kwarc/mmt/stex/STeXUtils.scala b/src/mmt-stex/src/info/kwarc/mmt/stex/STeXUtils.scala
index ca3de52fbd..062789d7f6 100644
--- a/src/mmt-stex/src/info/kwarc/mmt/stex/STeXUtils.scala
+++ b/src/mmt-stex/src/info/kwarc/mmt/stex/STeXUtils.scala
@@ -6,18 +6,16 @@ import info.kwarc.mmt.api.utils._
import scala.io.{BufferedSource, Codec}
import scala.util.matching.Regex
-object STeXUtils {
+object STeXUtils
+{
val c : String = java.io.File.pathSeparator
- def mathHubDir(bt: BuildTask): File = bt.archive.root.up.up.up
+ def mathHubDir(bt: BuildTask) : File = bt.archive.root.up.up.up
+ def extBase(bt: BuildTask) : File = mathHubDir(bt) / "ext"
+ def stexStyDir(bt: BuildTask) : File = extBase(bt) / "sTeX" / "sty"
+ def styPath(bt: BuildTask) : File = mathHubDir(bt) / "sty"
- def extBase(bt: BuildTask): File = mathHubDir(bt) / "ext"
-
- def stexStyDir(bt: BuildTask): File = extBase(bt) / "sTeX" / "sty"
-
- def styPath(bt: BuildTask): File = mathHubDir(bt) / "sty"
-
- def sysEnv(v: String): String = sys.env.getOrElse(v, "")
+ def sysEnv(v: String) : String = sys.env.getOrElse(v, "")
def env(bt: BuildTask): List[(String, String)] = {
val sty = "STEXSTYDIR"
@@ -56,8 +54,7 @@ object STeXUtils {
else getLangAmbleFile(ambleFile(groupMetaInf(a)), lang)
}
- def readSourceRebust(f: File): BufferedSource =
- scala.io.Source.fromFile(f)(Codec.UTF8)
+ def readSourceRebust(f: File): BufferedSource = scala.io.Source.fromFile(f)(Codec.UTF8)
def stripComment(line: String): String = {
val idx = line.indexOf('%')
@@ -79,35 +76,49 @@ object STeXUtils {
private def begin(s: String): String = "begin\\{" + s + "\\}"
- private val space = "\\s*"
- private val opt = "\\[(.*?)\\]"
- private val opt0 = "(" + opt + ")?"
- private val arg = space + "\\{(.*?)\\}"
- private val any = ".*"
- private val arg1 = arg + any
- private val optArg1 = opt0 + arg1
- private val bs = "\\\\"
- private val oStar = "\\*?"
- val input: Regex = (bs + "(lib)?input" + oStar + optArg1).r
- val includeGraphics: Regex = (bs + "includegraphics" + oStar + optArg1).r
- val importOrUseModule: Regex = (bs + "(import|use)Module" + opt + any).r
- val guse: Regex = (bs + "guse" + opt + arg1).r
- val useMhProblem: Regex = (bs + "includemhproblem" + optArg1).r
- val includeMhProblem: Regex = (bs + "includemhproblem" + optArg1).r
- val beginModnl: Regex = (bs + begin("m?h?modnl") + optArg1).r
- val mhinputRef: Regex = (bs + "m?h?inputref" + optArg1).r
- val tikzinput: Regex = (any + bs + "c?m?h?tikzinput" + optArg1).r
- private val smsKeys: List[String] = List("gadopt", "symvariant", "gimport") ++
+ private val space : String = "\\s*"
+ private val opt : String = "\\[(.*?)\\]"
+ private val opt0 : String = "(" + opt + ")?"
+ private val arg : String = space + "\\{(.*?)\\}"
+ private val any : String = ".*"
+ private val arg1 : String = arg + any
+ private val optArg1 : String = opt0 + arg1
+ private val bs : String = "\\\\" // backslash
+ private val oStar : String = "\\*?"
+
+ val input : Regex = (bs + "(lib)?input" + oStar + optArg1).r
+ val includeGraphics : Regex = (bs + "includegraphics" + oStar + optArg1).r
+ val importOrUseModule : Regex = (bs + "(import|use)Module" + opt + any).r
+
+ val useMhModule : Regex = (bs + "usemhmodule" + opt + arg1).r
+ val guse : Regex = (bs + "guse" + opt + arg1).r
+
+ val importMhModule : Regex = (bs + "importmhmodule" + opt + "(.*?)").r
+ val gimport : Regex = (bs + "gimport" + oStar + optArg1).r
+
+ val inputAssignment : Regex = (bs + "inputassignment" + optArg1).r
+ val includeAssignment : Regex = (bs + "includeassignment" + optArg1).r
+ val includeProblem : Regex = (bs + "includeproblem" + optArg1).r
+
+ val inputMhAssignment : Regex = (bs + "inputmhassignment" + optArg1).r
+ val includeMhAssignment : Regex = (bs + "includemhassignment" + optArg1).r
+ val includeMhProblem : Regex = (bs + "includemhproblem" + optArg1).r
+
+ val beginModnl : Regex = (bs + begin("(?:mh)?modnl") + optArg1).r
+ val mhinputRef : Regex = (bs + "n?(?:mh)?inputref" + optArg1).r
+ val tikzinput : Regex = (any + bs + "c?(?:mh)?tikzinput" + optArg1).r
+
+ private val smsKeys : List[String] = List("gadopt", "symvariant", "gimport") ++
List("sym", "abbr", "key", "listkey").map(_ + "def") ++
List("import", "adopt", "adoptmh").map(_ + "module")
+
private val smsTopKeys: List[String] = List("module", "importmodulevia", "importmhmodulevia")
+
val smsRegs: Regex = {
val begins: String = begin(mkRegGroup(smsTopKeys))
val ends: String = smsTopKeys.mkString("|end\\{(", "|", ")\\}")
("^\\\\(" + mkRegGroup(smsKeys) + "|" + begins + ends + ")").r
}
- val importMhModule: Regex = (bs + "importmhmodule" + opt + "(.*?)").r
- val gimport: Regex = (bs + "gimport" + oStar + optArg1).r
private def optArg2(s: String): String = bs + begin(s) + opt + arg + arg
diff --git a/src/project/build.properties b/src/project/build.properties
index 9f782f7048..72f902892a 100644
--- a/src/project/build.properties
+++ b/src/project/build.properties
@@ -1 +1 @@
-sbt.version=1.1.1
\ No newline at end of file
+sbt.version=1.2.7
diff --git a/src/test/Translator.scala b/src/test/Translator.scala
new file mode 100644
index 0000000000..e3e753da6d
--- /dev/null
+++ b/src/test/Translator.scala
@@ -0,0 +1,32 @@
+
+import info.kwarc.mmt.api.{GeneralError, Level}
+import info.kwarc.mmt.api.archives.Update
+import info.kwarc.mmt.api.web._
+import info.kwarc.mmt.api.presentation.MMTSyntaxPresenter
+import info.kwarc.mmt.api.utils.{EmptyList, File, FilePath}
+import info.kwarc.mmt.api.ontology
+import info.kwarc.mmt.api.ontology.RelationalReader
+import info.kwarc.mmt.api.archives.Archive
+
+
+import info.kwarc.mmt.api._
+import info.kwarc.mmt.api.documents.{Document, NRef}
+import info.kwarc.mmt.api.frontend.{Controller, Extension, FormatBasedExtension}
+import info.kwarc.mmt.api.modules._
+import info.kwarc.mmt.api.objects.{OMID, OMMOD, OMS}
+import info.kwarc.mmt.api.ontology.Declares._
+import info.kwarc.mmt.api.ontology._
+import info.kwarc.mmt.api.presentation.{HTMLPresenter, MMTDocExporter, MathMLPresenter, StructurePresenter}
+import info.kwarc.mmt.api.symbols._
+import info.kwarc.mmt.api.utils._
+
+
+object Graphtester extends MagicTest("jgraph") {
+ def run : Unit = {
+ //val test = WebQuery("type=archivegraph&graphdata=MMT/urtheories&semantic=grounded")
+ // println(test)
+ //println( test("graphdata"))
+ //val serve = new JSONBasedGraphServer()
+ }
+}
+
diff --git a/src/test/preamble.scala b/src/test/preamble.scala
index c00b7bc60f..6898dcd285 100644
--- a/src/test/preamble.scala
+++ b/src/test/preamble.scala
@@ -117,6 +117,8 @@ object MagicTest {
home / "Projects" / "gl.mathhub.info", // Tom
home / "Development" / "KWARC" / "content", // Jonas
home / "content", // Michael
+ //File("C:/mmt2/content/Mathhub"), //Max
+ File("C:") / "mmt2" / "content" / "MathHub", // Max
File("C:") / "other" / "oaff",
).find(_.exists).getOrElse(throw GeneralError("MagicTest failed: No known archive root"))
}
@@ -132,7 +134,10 @@ object MagicTest {
lazy val logfile: Option[File] = {
if((home / "work").exists){
Some(home / "work" / "mmtlog.html") // Dennis
- } else {
+ } // else if ((File("C:") / "/mmt2" / "/My stuff").exists) {
+ // Some(File("C:") / "/mmt2" / "/My stuff"/"mmtlog.html") // Max
+ //}
+ else {
None
}
}