From c1915343327faf27dd0d1c9510dfe59e1a8ed3d8 Mon Sep 17 00:00:00 2001 From: tnier01 Date: Fri, 24 Apr 2020 15:51:07 +0200 Subject: [PATCH] new test checks whether the diff img is included in the diff html --- test/TestPapers_TextDiff/htmlDiffTest.html | 173 +++++++++++++++++++++ test/testModuleUsage.js | 32 +++- 2 files changed, 201 insertions(+), 4 deletions(-) create mode 100644 test/TestPapers_TextDiff/htmlDiffTest.html diff --git a/test/TestPapers_TextDiff/htmlDiffTest.html b/test/TestPapers_TextDiff/htmlDiffTest.html new file mode 100644 index 0000000..8878e67 --- /dev/null +++ b/test/TestPapers_TextDiff/htmlDiffTest.html @@ -0,0 +1,173 @@ + + + + + + + + +
+ + + + + + + + + + + + + + +

Abstract. Ice cores provide insight into the past climate over many millennia.Cream is amazing, and not only during summer. Due to ice compaction, the raw data for any single core are irregular in time. Multiple cores have different irregularities; and when considered together, they are misaligned in time. After processing, such data are made available to researchers as regular time series: a data product. Typically, these cores are independently processed. This paper considers a fast Bayesian method for the joint processing of multiple irregular series. This is shown to be more efficient than the independent alternative. Furthermore, our explicit framework permits a reliable modelling of the impact of the multiple sources of uncertainty. The methodology is illustrated with the analysis of a pair of ice cores. Our data products, in the form of posterior marginals or joint distributions on an arbitrary temporal grid, are finite Gaussian mixtures. We can also produce process histories to study non-linear functionals of interest. More generally, the concept of joint analysis via hierarchical Gaussian process models can be widely extended, as the models used can be viewed within the larger context of continuous space–time processes.

+
## --------------------------------------------------------------
+##  Analysis of Geostatistical Data
+##  For an Introduction to geoR go to http://www.leg.ufpr.br/geoR
+##  geoR version 1.7-5.2 (built on 2016-05-02) is now loaded
+## --------------------------------------------------------------
+

+
## 25% 50% 75% 
+## 2.8 3.5 4.6
+
##   25%   50%   75% 
+##  9.94 12.52 16.78
+
## 25% 50% 75% 
+## 2.8 3.5 4.6
+
##   25%   50%   75% 
+##  9.94 12.52 16.78
+
###################
+### Variograms calculations
+var.gisp <- as.geodata(data.frame(coords = t(rbind(age.gisp, rep(1, n.gisp))), data=d18O.gisp))
+var.grip <- as.geodata(data.frame(coords = t(rbind(age.grip, rep(1, n.grip))), data=d18O.grip))
+gisp.vario.b <- variog(var.gisp, uvec=seq(0, 3.5, 0.1))  # binned variogram of the Holocene
+
## variog: computing omnidirectional variogram
+
grip.vario.b <- variog(var.grip, uvec=seq(0, 3.5, 0.1))
+
## variog: computing omnidirectional variogram
+
# Plots of the empirical semivariograms
+par(mar=c(3,3,2,1), mgp=c(2,.7,0), tck=-.01)
+plot(gisp.vario.b$u, gisp.vario.b$v, xlab='', ylab='',col=2,pch=18, bty='n', cex=0.6,
+     ylim=c(0,0.6), lwd=2, cex.lab=0.7, cex.axis=0.7, xaxs="i",yaxs="i", las=1)
+points(grip.vario.b$u, grip.vario.b$v, pch=20, col=4, lwd=2, cex=0.6)
+mtext('Lag (k yr)', side=1, line=2, cex=0.7) 
+mtext('Semivariance', side=2, line=2, cex=0.7) 
+legend('bottom', c('GISP2','GRIP'), pch=c(18,20), lty=-1, col=c(2,4), cex=0.6, bty='y', lwd=2)
+

+
# Weighted least squared method to estimate parameters, assuming the linear variogram model is a correct 
+# data generation mechanism
+init.gisp <- variofit(gisp.vario.b, cov.model='linear', weights='cressie')  
+
## variofit: covariance model used is linear 
+## variofit: weights used: cressie 
+## variofit: minimisation function used: optim
+
## Warning in variofit(gisp.vario.b, cov.model = "linear", weights =
+## "cressie"): initial values not provided - running the default search
+
## variofit: searching for best initial value ... selected values:
+##               sigmasq phi   tausq  kappa
+## initial.value "0.26"  "0"   "0.13" "0.5"
+## status        "est"   "est" "est"  "fix"
+## loss value: 7680.06930431104
+
init.grip <- variofit(grip.vario.b, cov.model='linear', weights='cressie') 
+
## variofit: covariance model used is linear 
+## variofit: weights used: cressie 
+## variofit: minimisation function used: optim
+
## Warning in variofit(grip.vario.b, cov.model = "linear", weights =
+## "cressie"): initial values not provided - running the default search
+
## variofit: searching for best initial value ... selected values:
+##               sigmasq phi   tausq  kappa
+## initial.value "0.57"  "0"   "0.28" "0.5"
+## status        "est"   "est" "est"  "fix"
+## loss value: 330588.190603372
+
sigmasq.gisp.est <- max(init.gisp$nugget, 1E-6)  # tau.gisp <- 1/(0.14^2)
+sigmasq.grip.est <- max(init.grip$nugget, 1E-6)  # tau.grip <- 1/(0.05^2)
+v.gisp.est <- max(1E-6, init.gisp$cov.pars[1])*2
+v.grip.est <- max(1E-6, init.grip$cov.pars[1])*2
+
+#scaled distribution of first differences
+gisp.qq <- diff(d18O.gisp)/sqrt(v.gisp.est*diff(age.gisp)+ 2*sigmasq.gisp.est)
+grip.qq <- diff(d18O.grip)/sqrt(v.grip.est*diff(age.grip)+ 2*sigmasq.grip.est)
+
+gisp.probs = (1:(n.gisp-1))/(n.gisp)
+n.quant1 = qnorm(gisp.probs, mean(gisp.qq, na.rm = T), sd(gisp.qq, na.rm = T))
+grip.probs = (1:(n.grip-1))/(n.grip)
+n.quant2 = qnorm(grip.probs, mean(grip.qq, na.rm = T), sd(grip.qq, na.rm = T))
+
# QQ plots
+par(mar=c(3,3,2,1), mgp=c(2,.7,0), tck=-.01)
+par(mfrow=c(1,2))
+plot(sort(gisp.qq) , sort(n.quant1), ylab = '', xlab = '', 
+     cex.lab=0.7, cex.axis=0.7, las=1, bty='n', col=2, pch=21, axes=F, xlim=c(-5.4,4), cex=.6)
+abline(0,1, lwd=2, col=3)
+title(outer=F, adj=.1, main="a", cex.main=0.9, col="black", font=2, line=0)
+mtext("Theoretical quantiles", side=2, line=2, at=0, font=1, cex=0.7) 
+mtext("Empirical quantiles", side=1, line=2, at=0, las=1, font=1, cex=0.7)
+axis(side=1, at = seq(-4, 4, by=2), labels=T, cex.axis=0.7, las=1)
+axis(side=2, at = seq(-4, 4, by=2), labels=T, cex.axis=0.7, las=1)
+
+plot(sort(grip.qq) , sort(n.quant2), ylab = '', xlab = '', cex=.6,
+     cex.lab=0.7, cex.axis=0.7, las=1, bty='n', col=4, pch=21, axes=F, xlim=c(-5.4,4))
+abline(0,1, lwd=2, col=3)
+mtext("Theoretical quantiles", side=2, line=2, at=0, font=1, cex=0.7) 
+mtext("Empirical quantiles", side=1, line=2, at=0, las=1, font=1, cex=0.7)
+title(outer=F, adj=.1, main="b", cex.main=0.9, col="black", font=2, line=0)
+axis(side=1, at = seq(-4, 4, by=2), labels=T, cex.axis=0.7, las=1)
+axis(side=2, at = seq(-4, 4, by=2), labels=T, cex.axis=0.7, las=1)
+ +// add bootstrap table styles to pandoc tables +function bootstrapStylePandocTables() { + $('tr.header').parent('thead').parent('table').addClass('table table-condensed'); +} +$(document).ready(function () { + bootstrapStylePandocTables(); +}); + + + + + + + + + diff --git a/test/testModuleUsage.js b/test/testModuleUsage.js index 2a75cc6..89e4628 100644 --- a/test/testModuleUsage.js +++ b/test/testModuleUsage.js @@ -255,6 +255,30 @@ describe('Using ERC-Checker as node-module', function () { }); }); + describe('For a check on two papers containing equal amount of, but differing images, and "createParentDirectories" flag set', function () { + it('should successfully write a "diffHTML.html" file to the directory specified as Absolute Path which includes the diff image and the diff text highlighted.', function () { + let configSaveMeta = Object.assign({}, checkConfig); + configSaveMeta.pathToOriginalHTML = testStringA; + configSaveMeta.pathToReproducedHTML = testStringB; + configSaveMeta.saveFilesOutputPath = tmp.dirSync().name; + configSaveMeta.saveDiffHTML = true; + configSaveMeta.saveMetadataJSON = true; + configSaveMeta.createParentDirectories = true; + + return checker(configSaveMeta) + .then(function () { + let htmlOutpath = path.join(configSaveMeta.saveFilesOutputPath, 'diffHTML.html'); + let savedHTMLFileContent = fs.readFileSync(htmlOutpath, 'utf-8'); + let referenceHTMLFileContent = fs.readFileSync('./test/TestPapers_TextDiff/htmlDiffTest.html', 'utf-8'); + + assert.deepStrictEqual(referenceHTMLFileContent, savedHTMLFileContent, 'Saved html file varies from original check result html!'); + }, + function (reason) { + assert.ifError(reason); + }); + }).timeout(10000); + }); + describe('With "saveMetadataJSON" flag set to "true", and "saveFilesOutputPath" given in the config object', function () { describe('for a check on two papers containing equal amount of, but differing images, and "createParentDirectories" flag set', function () { @@ -269,7 +293,7 @@
describe('Using ERC-Checker as node-module', function () { return checker(configSaveMeta) .then(function () { - let jsonOutpath = path.join(configSaveMeta.saveFilesOutputPath, "metadata.json"); + let jsonOutpath = path.join(configSaveMeta.saveFilesOutputPath, 'metadata.json'); try { fs.accessSync(jsonOutpath); @@ -285,7 +309,7 @@ describe('Using ERC-Checker as node-module', function () { }); describe('for a check on two papers containing equal amount of, but differing images, and "createParentDirectories" flag set', function () { - it('should successfully write a "metadata.json" which is matching a reference file', function () { + it('should successfully write a "metadata.json" which matches the original check result metadata.', function () { let configSaveMeta = Object.assign({}, checkConfig); configSaveMeta.pathToOriginalHTML = testStringA; configSaveMeta.pathToReproducedHTML = testStringB; @@ -296,7 +320,7 @@ describe('Using ERC-Checker as node-module', function () { return checker(configSaveMeta) .then(function (resultMetadata) { - let jsonOutpath = path.join(configSaveMeta.saveFilesOutputPath, "metadata.json"); + let jsonOutpath = path.join(configSaveMeta.saveFilesOutputPath, 'metadata.json'); let savedJSONFileContent; let resMeta = resultMetadata; @@ -305,7 +329,7 @@ describe('Using ERC-Checker as node-module', function () { resMeta = JSON.stringify(resMeta); savedJSONFileContent = JSON.stringify(savedJSONFileContent); - assert.deepStrictEqual(resMeta, savedJSONFileContent, "Saved metadata.json file content varries from original check result metadata:"); + assert.deepStrictEqual(resMeta, savedJSONFileContent, "Saved metadata.json file content varies from original check result metadata."); }, function (reason) { assert.ifError(reason);