diff --git a/.vscode/launch.json b/.vscode/launch.json index 8e1c32bab..662e79b55 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,7 +1,4 @@ { - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 "version": "0.2.0", "configurations": [ { diff --git a/charts/application.yaml.default b/charts/application.yaml.default index 75e3f5635..f57e3f1f4 100644 --- a/charts/application.yaml.default +++ b/charts/application.yaml.default @@ -104,41 +104,6 @@ spring: callbackUrl: AAF_CALLBACK_URL jwtsecret: AAF_SECRET -# Cloud specific settings -cloud: - localStageInDir: C:\\temp\\vegl-portal-temp - sssc: - solutions: - url: https://sssc-vgl.geoanalytics.csiro.au/ - defaultToolbox: http://ec2-54-206-9-187.ap-southeast-2.compute.amazonaws.com/scm/toolbox/3 - proms: - report: - url: http://proms-dev1-vc.it.csiro.au/id/report/ - reportingsystem: - uri: http://proms-dev1-vc.it.csiro.au/rs - - # Gives us separation from real DB passwords. - encryption: - password: ABCdefGHI - - # AWS keys to be used for dev only. Production system to use AWS roles for authorization - aws: - account: AWS_ACCOUNT - accesskey: AWS_ACCESS_KEY - secretkey: AWS_SECRET_KEY - sessionkey: AWS_SESSION_KEY - stsrequirement: Mandatory - - # Cloud VM files - vm: - sh: https://raw.githubusercontent.com/AuScope/ANVGL-Portal/master/vm/vl.sh - vm-shutdown: - sh: https://raw.githubusercontent.com/AuScope/ANVGL-Portal/master/vm/vl-shutdown.sh - - erddapservice: - url: http://siss2.anu.edu.au/erddap/griddap/ - stripeApiKey: STRIPE_API_KEY - env: stackdriver: enable: false diff --git a/pom.xml b/pom.xml index 318b82386..9851925c3 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ org.auscope.portal auscope-portal-api war - 6.6.0-SNAPSHOT + 6.6.1-SNAPSHOT AuScope-Portal-API AuScope's flagship portal https://portal.auscope.org.au @@ -36,7 +36,7 @@ 2.0.5 2.17.1 2.6.5 - 2.8.0-SNAPSHOT + 2.8.1-SNAPSHOT 4.5.13 8.0.14 @@ -77,42 +77,17 @@ - - - com.squareup.okhttp3 - okhttp - 4.10.0 + org.yaml + snakeyaml + 1.33 - - commons-beanutils - commons-beanutils - 1.9.4 - - - com.google.cloud - libraries-bom - 22.0.0 - pom - import - - - com.google.cloud - google-cloud-storage - 2.22.5 - - - net.bytebuddy - byte-buddy - - - - + org.springframework.boot spring-boot-starter @@ -153,18 +128,6 @@ org.springframework spring-context-support - - com.jcraft - jsch - 0.1.55 - - - - proms-client-java - proms-client-java - 0.0.2-SNAPSHOT - - org.auscope.portal portal-core @@ -197,36 +160,6 @@ 1.0 - - org.apache.taglibs - taglibs-standard-impl - 1.2.5 - - - - - commons-fileupload - commons-fileupload - 1.5 - - - commons-io - commons-io - 2.8.0 - - - - org.apache.commons - commons-collections4 - 4.4 - - - - org.javassist - javassist - 3.27.0-GA - - org.hsqldb @@ -239,6 +172,7 @@ opencsv 5.7.1 + org.json json @@ -257,122 +191,24 @@ - - - org.apache.velocity - velocity - 1.7 - - - - edu.ucar - netcdf4 - 5.3.3 - - - - - org.powermock - powermock-api-support - ${powermock.version} - test - - - org.powermock - powermock-module-junit4 - ${powermock.version} - test - - - - - org.powermock - powermock-api-mockito2 - ${powermock.version} - test - - - - - - - - - - - - - - javax.mail - javax.mail-api - 1.6.2 - - mysql mysql-connector-java 8.0.28 + org.postgresql postgresql runtime + org.apache.jena apache-jena-libs 4.10.0 pom - - com.google.inject - guice - 5.0.1 - - - com.google.inject.extensions - guice-assistedinject - 5.0.1 - 
- - org.apache.jclouds - jclouds-all - 2.4.0 - - - javax.annotation - jsr250-api - - - com.google.inject - guice - - - com.google.inject.extensions - guice-assistedinject - - - - - - javax.annotation - javax.annotation-api - 1.3.2 - - - com.stripe - stripe-java - 20.41.0 - org.jmock @@ -390,17 +226,14 @@ + org.jmock jmock-imposters 2.12.0 test - - org.ow2.asm - asm - 9.1 - + org.xmlunit xmlunit-core @@ -412,45 +245,22 @@ 2.6.3 test - - - - org.seleniumhq.selenium - selenium-java - 4.9.1 - test - - - xml-apis - xml-apis - - - + com.fasterxml.jackson.core jackson-core - - org.testng - testng - 7.7.0 - test - com.fasterxml.jackson.core jackson-databind 2.13.4.2 + com.azure.spring spring-cloud-azure-appconfiguration-config 5.5.0 - - com.squareup.okhttp3 - okhttp - 4.12.0 - @@ -484,20 +294,7 @@ - - maven-surefire-plugin - 3.0.0-M9 - - methods - 10 - true - - - org/auscope/portal/ui/* - org/auscope/portal/ui/mobile/* - - - + io.github.git-commit-id git-commit-id-maven-plugin diff --git a/src/main/db/mysql/add_job_annotations.sql b/src/main/db/mysql/add_job_annotations.sql deleted file mode 100644 index c4868ecae..000000000 --- a/src/main/db/mysql/add_job_annotations.sql +++ /dev/null @@ -1,9 +0,0 @@ -CREATE TABLE `job_annotations` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `job_id` int(11) NOT NULL, - `value` varchar(255) NOT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY (`job_id`) - REFERENCES jobs(`id`) - ON DELETE CASCADE -); diff --git a/src/main/db/mysql/auscope-schema-mysql.sql b/src/main/db/mysql/auscope-schema-mysql.sql index 74865616d..802c375f0 100644 --- a/src/main/db/mysql/auscope-schema-mysql.sql +++ b/src/main/db/mysql/auscope-schema-mysql.sql @@ -1,83 +1,12 @@ -DROP TABLE IF EXISTS `authorities`; -DROP TABLE IF EXISTS `job_solutions`; -DROP TABLE IF EXISTS `downloads`; -DROP TABLE IF EXISTS `jobs_audit_log`; -DROP TABLE IF EXISTS `parameters`; -DROP TABLE IF EXISTS `job_purchases`; -DROP TABLE IF EXISTS `job_annotations`; -DROP TABLE IF EXISTS `jobs`; -DROP TABLE IF EXISTS `series`; -DROP TABLE IF EXISTS `hashmap_params`; -DROP TABLE IF EXISTS `bookmark_download_options`; -DROP TABLE IF EXISTS `bookmarks`; -DROP TABLE IF EXISTS `data_purchases`; -DROP TABLE IF EXISTS `users`; -DROP TABLE IF EXISTS `nci_details`; - - CREATE TABLE `users` ( `id` varchar(128) NOT NULL, `fullName` varchar(256) DEFAULT NULL, `email` varchar(256) DEFAULT NULL, - `arnExecution` varchar(128) DEFAULT NULL, - `arnStorage` varchar(128) DEFAULT NULL, - `awsSecret` varchar(128) DEFAULT NULL, `acceptedTermsConditions` int(11) DEFAULT NULL, - `awsAccount` varchar(128) DEFAULT NULL, - `awsKeyName` varchar(45) DEFAULT NULL, - `s3Bucket` varchar(64) DEFAULT NULL, PRIMARY KEY (`id`) USING BTREE, UNIQUE KEY `EMAIL` (`email`) USING BTREE ); -CREATE TABLE `series` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `user` varchar(255) NOT NULL, - `name` varchar(255) NOT NULL, - `description` varchar(255) DEFAULT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY (`user`) - REFERENCES users(`email`) - ON DELETE CASCADE ON UPDATE CASCADE -); - -CREATE TABLE `jobs` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `name` varchar(255) DEFAULT NULL, - `description` varchar(255) DEFAULT NULL, - `emailAddress` varchar(255) DEFAULT NULL, - `user` varchar(255) DEFAULT NULL, - `submitDate` datetime DEFAULT NULL, - `status` varchar(255) DEFAULT NULL, - `computeVmId` varchar(255) DEFAULT NULL, - `computeInstanceId` varchar(255) DEFAULT NULL, - `computeInstanceType` varchar(255) DEFAULT NULL, - `computeInstanceKey` varchar(255) DEFAULT NULL, - 
`registeredUrl` varchar(255) DEFAULT NULL, - `seriesId` int(11) DEFAULT NULL, - `storageBaseKey` varchar(255) DEFAULT NULL, - `computeServiceId` varchar(45) DEFAULT NULL, - `storageServiceId` varchar(45) DEFAULT NULL, - `processDate` datetime DEFAULT NULL, - `emailNotification` char(1) DEFAULT 'N', - `processTimeLog` varchar(255) DEFAULT '', - `storageBucket` varchar(64) DEFAULT NULL, - `walltime` int(11) DEFAULT NULL, - `executeDate` datetime DEFAULT NULL, - `folderId` int(11) DEFAULT NULL, - `containsPersistentVolumes` char(1) DEFAULT 'N', - `promsReportUrl` varchar(255) DEFAULT NULL, - `computeVmRunCommand` varchar(64) DEFAULT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY `SERIES` (`seriesId`) - REFERENCES series(`id`) - ON DELETE CASCADE, - KEY `JOB_LIST` (`emailAddress`,`status`,`folderId`), - FOREIGN KEY (`emailAddress`) - REFERENCES users(`email`) - ON DELETE CASCADE ON UPDATE CASCADE -); - CREATE TABLE `authorities` ( `id` int(11) NOT NULL AUTO_INCREMENT, `authority` varchar(32) DEFAULT NULL, @@ -88,69 +17,6 @@ CREATE TABLE `authorities` ( ON DELETE CASCADE ); -CREATE TABLE job_solutions ( - job_id int(11) NOT NULL, - solution_id varchar(255) NOT NULL, - FOREIGN KEY (`job_id`) - REFERENCES jobs(`id`) - ON DELETE CASCADE -); - -CREATE TABLE `downloads` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `jobId` int(11) NOT NULL, - `url` varchar(4096) NOT NULL, - `localPath` varchar(1024) NOT NULL, - `name` varchar(128) DEFAULT NULL, - `description` varchar(1024) DEFAULT NULL, - `northBoundLatitude` double DEFAULT NULL, - `southBoundLatitude` double DEFAULT NULL, - `eastBoundLongitude` double DEFAULT NULL, - `westBoundLongitude` double DEFAULT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY `jobId` (`jobId`) - REFERENCES jobs(`id`) - ON DELETE CASCADE -); - -CREATE TABLE `jobs_audit_log` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `jobId` int(11) NOT NULL, - `fromStatus` varchar(255) DEFAULT NULL, - `toStatus` varchar(255) NOT NULL, - `transitionDate` datetime NOT NULL, - `message` varchar(1000) DEFAULT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY `jobId_audit` (`jobId`) - REFERENCES jobs(`id`) - ON DELETE CASCADE -); - -CREATE TABLE `parameters` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `jobId` int(11) NOT NULL, - `name` varchar(255) NOT NULL, - `value` varchar(4096) DEFAULT NULL, - `type` varchar(45) NOT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY `jobId_parameters` (`jobId`) - REFERENCES jobs(`id`) - ON DELETE CASCADE, - KEY `jobIdName` (`jobId`,`name`) -); - -CREATE TABLE `nci_details` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `user` varchar(128) DEFAULT NULL, - `nci_username` blob DEFAULT NULL, - `nci_key` blob DEFAULT NULL, - `nci_project` blob DEFAULT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY (`user`) - REFERENCES users(`id`) - ON DELETE CASCADE ON UPDATE CASCADE -); - CREATE TABLE `states` ( `id` varchar(128) NOT NULL, `userid` varchar(128) DEFAULT NULL, @@ -164,6 +30,7 @@ CREATE TABLE `states` ( REFERENCES users(`id`) ON DELETE CASCADE ON UPDATE CASCADE ); + CREATE TABLE `hashmap_params` ( `key` varchar(128) NOT NULL, `value` varchar(1000000) NOT NULL, @@ -196,48 +63,3 @@ CREATE TABLE `bookmark_download_options` ( KEY `ID_BOOKMARKS` (`bookmarkId`), CONSTRAINT `ID_BOOKMARKS` FOREIGN KEY (`bookmarkId`) REFERENCES `bookmarks` (`id`) ON DELETE CASCADE ); - -CREATE TABLE `data_purchases` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `date` datetime NOT NULL, - `amount` float NOT NULL, - `downloadUrl` varchar(4096) NOT NULL, - `cswRecord` text NOT NULL, - `onlineResourceType` varchar(25) NOT NULL, - 
`url` varchar(4096) NOT NULL, - `localPath` varchar(1024) DEFAULT NULL, - `name` varchar(128) DEFAULT NULL, - `description` varchar(1024) DEFAULT NULL, - `northBoundLatitude` double DEFAULT NULL, - `southBoundLatitude` double DEFAULT NULL, - `eastBoundLongitude` double DEFAULT NULL, - `westBoundLongitude` double DEFAULT NULL, - `paymentRecord` varchar(4096) NOT NULL, - `userId` varchar(128) NOT NULL, - PRIMARY KEY (`id`), - KEY `USER_ID_DATA_PURCHASES` (`userId`), - CONSTRAINT `USER_ID_DATA_PURCHASES` FOREIGN KEY (`userId`) REFERENCES `users` (`id`) ON DELETE CASCADE -); - -CREATE TABLE `job_purchases` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `date` datetime NOT NULL, - `amount` float NOT NULL, - `jobId` int(11) NOT NULL, - `jobName` varchar(128) DEFAULT NULL, - `paymentRecord` varchar(4096) NOT NULL, - `userId` varchar(128) NOT NULL, - PRIMARY KEY (`id`), - KEY `USER_ID_JOB_PURCHASES` (`userId`), - CONSTRAINT `USER_ID_JOB_PURCHASES` FOREIGN KEY (`userId`) REFERENCES `users` (`id`) ON DELETE CASCADE -); - -CREATE TABLE `job_annotations` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `job_id` int(11) NOT NULL, - `value` varchar(255) NOT NULL, - PRIMARY KEY (`id`), - FOREIGN KEY (`job_id`) - REFERENCES jobs(`id`) - ON DELETE CASCADE -); diff --git a/src/main/db/postgres/auscope-schema-pg.sql b/src/main/db/postgres/auscope-schema-pg.sql index 3ecaa1f7a..0d53192b9 100644 --- a/src/main/db/postgres/auscope-schema-pg.sql +++ b/src/main/db/postgres/auscope-schema-pg.sql @@ -1,86 +1,12 @@ -DROP TABLE IF EXISTS authorities; -DROP TABLE IF EXISTS job_solutions; -DROP TABLE IF EXISTS downloads; -DROP TABLE IF EXISTS jobs_audit_log; -DROP TABLE IF EXISTS parameters; -DROP TABLE IF EXISTS job_purchases; -DROP TABLE IF EXISTS job_annotations; -DROP TABLE IF EXISTS jobs; -DROP TABLE IF EXISTS series; -DROP TABLE IF EXISTS hashmap_params; -DROP TABLE IF EXISTS bookmark_download_options; -DROP TABLE IF EXISTS bookmarks; -DROP TABLE IF EXISTS data_purchases; -DROP TABLE IF EXISTS users; -DROP TABLE IF EXISTS nci_details; - CREATE TABLE users ( id varchar(128) NOT NULL, fullName varchar(256) DEFAULT NULL, email varchar(256) DEFAULT NULL, - arnExecution varchar(128) DEFAULT NULL, - arnStorage varchar(128) DEFAULT NULL, - awsSecret varchar(128) DEFAULT NULL, acceptedTermsConditions int DEFAULT NULL, - awsAccount varchar(128) DEFAULT NULL, - awsKeyName varchar(45) DEFAULT NULL, - s3Bucket varchar(64) DEFAULT NULL, PRIMARY KEY (id), CONSTRAINT EMAIL UNIQUE (email) ); -CREATE SEQUENCE series_seq; - -CREATE TABLE series ( - id int NOT NULL DEFAULT NEXTVAL ('series_seq'), - "user" varchar(255) NOT NULL, - "name" varchar(255) NOT NULL, - description varchar(255) DEFAULT NULL, - PRIMARY KEY (id), - FOREIGN KEY ("user") - REFERENCES users(email) - ON DELETE CASCADE ON UPDATE CASCADE -); - -CREATE SEQUENCE jobs_seq; - -CREATE TABLE jobs ( - id int NOT NULL DEFAULT NEXTVAL ('jobs_seq'), - "name" varchar(255) DEFAULT NULL, - description varchar(255) DEFAULT NULL, - emailAddress varchar(255) DEFAULT NULL, - "user" varchar(255) DEFAULT NULL, - submitDate timestamp(0) DEFAULT NULL, - status varchar(255) DEFAULT NULL, - computeVmId varchar(255) DEFAULT NULL, - computeInstanceId varchar(255) DEFAULT NULL, - computeInstanceType varchar(255) DEFAULT NULL, - computeInstanceKey varchar(255) DEFAULT NULL, - registeredUrl varchar(255) DEFAULT NULL, - seriesId int DEFAULT NULL, - storageBaseKey varchar(255) DEFAULT NULL, - computeServiceId varchar(45) DEFAULT NULL, - storageServiceId varchar(45) DEFAULT NULL, - processDate 
timestamp(0) DEFAULT NULL, - emailNotification char(1) DEFAULT 'N', - processTimeLog varchar(255) DEFAULT '', - storageBucket varchar(64) DEFAULT NULL, - walltime int DEFAULT NULL, - executeDate timestamp(0) DEFAULT NULL, - folderId int DEFAULT NULL, - containsPersistentVolumes char(1) DEFAULT 'N', - promsReportUrl varchar(255) DEFAULT NULL, - computeVmRunCommand varchar(64) DEFAULT NULL, - PRIMARY KEY (id), - FOREIGN KEY (seriesId) - REFERENCES series(id) - ON DELETE CASCADE, - --KEY JOB_LIST (emailAddress,status,folderId), - FOREIGN KEY (emailAddress) - REFERENCES users(email) - ON DELETE CASCADE ON UPDATE CASCADE -); - CREATE SEQUENCE authorities_seq; CREATE TABLE authorities ( @@ -93,76 +19,6 @@ CREATE TABLE authorities ( ON DELETE CASCADE ); -CREATE TABLE job_solutions ( - job_id int NOT NULL, - solution_id varchar(255) NOT NULL, - FOREIGN KEY (job_id) - REFERENCES jobs(id) - ON DELETE CASCADE -); - -CREATE SEQUENCE downloads_seq; - -CREATE TABLE downloads ( - id int NOT NULL DEFAULT NEXTVAL ('downloads_seq'), - jobId int NOT NULL, - url varchar(4096) NOT NULL, - localPath varchar(1024) NOT NULL, - "name" varchar(128) DEFAULT NULL, - description varchar(1024) DEFAULT NULL, - northBoundLatitude double precision DEFAULT NULL, - southBoundLatitude double precision DEFAULT NULL, - eastBoundLongitude double precision DEFAULT NULL, - westBoundLongitude double precision DEFAULT NULL, - PRIMARY KEY (id), - FOREIGN KEY (jobId) - REFERENCES jobs(id) - ON DELETE CASCADE -); - -CREATE SEQUENCE jobs_audit_log_seq; - -CREATE TABLE jobs_audit_log ( - id int NOT NULL DEFAULT NEXTVAL ('jobs_audit_log_seq'), - jobId int NOT NULL, - fromStatus varchar(255) DEFAULT NULL, - toStatus varchar(255) NOT NULL, - transitionDate timestamp(0) NOT NULL, - message varchar(1000) DEFAULT NULL, - PRIMARY KEY (id), - FOREIGN KEY (jobId) - REFERENCES jobs(id) - ON DELETE CASCADE -); - -CREATE SEQUENCE parameters_seq; - -CREATE TABLE parameters ( - id int NOT NULL DEFAULT NEXTVAL ('parameters_seq'), - jobId int NOT NULL, - "name" varchar(255) NOT NULL, - value varchar(4096) DEFAULT NULL, - type varchar(45) NOT NULL, - PRIMARY KEY (id), - FOREIGN KEY (jobId) - REFERENCES jobs(id) - ON DELETE CASCADE--, -); - -CREATE SEQUENCE nci_details_seq; - -CREATE TABLE nci_details ( - id int NOT NULL DEFAULT NEXTVAL ('nci_details_seq'), - "user" varchar(128) DEFAULT NULL, - nci_username bytea DEFAULT NULL, - nci_key bytea DEFAULT NULL, - nci_project bytea DEFAULT NULL, - PRIMARY KEY (id), - FOREIGN KEY ("user") - REFERENCES users(id) - ON DELETE CASCADE ON UPDATE CASCADE -); - CREATE TABLE states ( id varchar(128) NOT NULL, "userid" varchar(128) DEFAULT NULL, @@ -215,56 +71,3 @@ CREATE TABLE bookmark_download_options ( ); CREATE INDEX ID_BOOKMARKS ON bookmark_download_options (bookmarkId); - -CREATE SEQUENCE data_purchases_seq; - -CREATE TABLE data_purchases ( - id int NOT NULL DEFAULT NEXTVAL ('data_purchases_seq'), - date timestamp(0) NOT NULL, - amount double precision NOT NULL, - downloadUrl varchar(4096) NOT NULL, - cswRecord text NOT NULL, - onlineResourceType varchar(25) NOT NULL, - url varchar(4096) NOT NULL, - localPath varchar(1024) DEFAULT NULL, - "name" varchar(128) DEFAULT NULL, - description varchar(1024) DEFAULT NULL, - northBoundLatitude double precision DEFAULT NULL, - southBoundLatitude double precision DEFAULT NULL, - eastBoundLongitude double precision DEFAULT NULL, - westBoundLongitude double precision DEFAULT NULL, - paymentRecord varchar(4096) NOT NULL, - userId varchar(128) NOT NULL, - PRIMARY KEY (id), - 
CONSTRAINT USER_ID_DATA_PURCHASES FOREIGN KEY (userId) REFERENCES users (id) ON DELETE CASCADE -); - -CREATE INDEX USER_ID_DATA_PURCHASES ON data_purchases (userId); - -CREATE SEQUENCE job_purchases_seq; - -CREATE TABLE job_purchases ( - id int NOT NULL DEFAULT NEXTVAL ('job_purchases_seq'), - date timestamp(0) NOT NULL, - amount double precision NOT NULL, - jobId int NOT NULL, - jobName varchar(128) DEFAULT NULL, - paymentRecord varchar(4096) NOT NULL, - userId varchar(128) NOT NULL, - PRIMARY KEY (id), - CONSTRAINT USER_ID_JOB_PURCHASES FOREIGN KEY (userId) REFERENCES users (id) ON DELETE CASCADE -); - -CREATE INDEX USER_ID_JOB_PURCHASES ON job_purchases (userId); - -CREATE SEQUENCE job_annotations_seq; - -CREATE TABLE job_annotations ( - id int NOT NULL DEFAULT NEXTVAL ('job_annotations_seq'), - job_id int NOT NULL, - value varchar(255) NOT NULL, - PRIMARY KEY (id), - FOREIGN KEY (job_id) - REFERENCES jobs(id) - ON DELETE CASCADE -); diff --git a/src/main/java/org/auscope/portal/server/config/AppContext.java b/src/main/java/org/auscope/portal/server/config/AppContext.java index b1efa527b..3e0193831 100755 --- a/src/main/java/org/auscope/portal/server/config/AppContext.java +++ b/src/main/java/org/auscope/portal/server/config/AppContext.java @@ -7,7 +7,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Properties; import java.util.TimeZone; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -15,9 +14,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.velocity.app.VelocityEngine; -import org.auscope.portal.core.cloud.MachineImage; -import org.auscope.portal.core.cloud.StagingInformation; import org.auscope.portal.core.configuration.ServiceConfiguration; import org.auscope.portal.core.configuration.ServiceConfigurationItem; import org.auscope.portal.core.server.PortalPropertySourcesPlaceholderConfigurer; @@ -29,20 +25,12 @@ import org.auscope.portal.core.services.GoogleCloudMonitoringCachedService; import org.auscope.portal.core.services.KnownLayerService; import org.auscope.portal.core.services.OpendapService; -import org.auscope.portal.core.services.PortalServiceException; import org.auscope.portal.core.services.VocabularyCacheService; import org.auscope.portal.core.services.VocabularyFilterService; import org.auscope.portal.core.services.WCSService; import org.auscope.portal.core.services.WFSGml32Service; import org.auscope.portal.core.services.WFSService; import org.auscope.portal.core.services.WMSService; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudComputeServiceAws; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.services.cloud.CloudStorageServiceJClouds; -import org.auscope.portal.core.services.cloud.STSRequirement; -import org.auscope.portal.core.services.cloud.monitor.JobStatusChangeListener; -import org.auscope.portal.core.services.cloud.monitor.JobStatusMonitor; import org.auscope.portal.core.services.csw.CSWServiceItem; import org.auscope.portal.core.services.csw.GriddedCSWRecordTransformerFactory; import org.auscope.portal.core.services.csw.ViewGriddedCSWRecordFactory; @@ -64,27 +52,13 @@ import org.auscope.portal.core.xslt.GmlToHtml; import org.auscope.portal.core.xslt.WfsToKmlTransformer; import org.auscope.portal.mscl.MSCLWFSService; -import org.auscope.portal.server.vegl.VEGLJobManager; -import 
org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.auscope.portal.server.vegl.VglMachineImage; -import org.auscope.portal.server.vegl.mail.JobCompletionMailSender; import org.auscope.portal.server.web.CatalogServicesHealthIndicator; import org.auscope.portal.server.web.SearchHttpServiceCaller; -import org.auscope.portal.server.web.service.ANVGLFileStagingService; -import org.auscope.portal.server.web.service.ANVGLProvenanceService; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.NCIDetailsService; import org.auscope.portal.server.web.service.SimpleWfsService; -import org.auscope.portal.server.web.service.VGLCryptoService; -import org.auscope.portal.server.web.service.cloud.CloudComputeServiceNci; -import org.auscope.portal.server.web.service.cloud.CloudStorageServiceNci; import org.auscope.portal.server.web.service.monitor.KnownLayerStatusMonitor; -import org.auscope.portal.server.web.service.monitor.VGLJobStatusChangeHandler; -import org.auscope.portal.server.web.service.monitor.VGLJobStatusMonitor; import org.quartz.Trigger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; -import org.springframework.beans.factory.config.MethodInvokingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; @@ -96,7 +70,6 @@ import org.springframework.scheduling.quartz.CronTriggerFactoryBean; import org.springframework.scheduling.quartz.JobDetailFactoryBean; import org.springframework.scheduling.quartz.SchedulerFactoryBean; -import org.springframework.scheduling.quartz.SimpleTriggerFactoryBean; import org.springframework.web.multipart.support.StandardServletMultipartResolver; @@ -116,36 +89,6 @@ public class AppContext { protected final Log logger = LogFactory.getLog(getClass()); - @Value("${cloud.aws.account:undefined}") - private String awsAcct; - - @Bean public String awsAccount() { - return awsAcct; - } - - @Value("${cloud.aws.accesskey:undefined}") - private String awsAccessKey; - - @Value("${cloud.aws.secretkey:undefined}") - private String awsSecretKey; - - @Value("${cloud.aws.sessionkey:undefined}") - private String awsSessionKey; - - @Value("${cloud.aws.stsrequirement:Mandatory}") - private String awsStsRequirement; - - @Value("${cloud.localStageInDir}") - private String stageInDirectory; - @Value("${localCacheDir:#{null}}") - private String localCacheDir; - - @Value("${cloud.proms.report.url}") - private String promsUrl; - - @Value("${cloud.proms.reportingsystem.uri}") - private String promsReportingSystemUri; - @Value("${smtp.server}") private String smtpServer; @@ -161,12 +104,6 @@ public class AppContext { @Value("${knownLayersCronExpression:0 0 3 * * ?}") private String knownLayersCronExpression; - @Value("${cloud.encryption.password}") - private String encryptionPassword; - - @Value("${cloud.sssc.solutions.url}") - private String solutionsUrl; - // Active profile i.e. 
'test' or 'prod' @Value("${spring.profiles.active}") private String activeProfile; @@ -174,16 +111,6 @@ public class AppContext { @Value("${spring.data.elasticsearch.manualUpdateOnly:false}") private boolean manualUpdateOnly; - @Autowired - private VEGLJobManager jobManager; - - @Autowired - private PortalUserService userService; - - @Autowired - private NCIDetailsService nciDetailsService; - - @Autowired private ArrayList cswServiceList; @@ -197,99 +124,16 @@ public MailSender mailSender() { return mailSender; } - @Bean - public VelocityEngine velocityEngine() throws Exception { - Properties properties = new Properties(); - properties.setProperty("input.encoding", "UTF-8"); - properties.setProperty("output.encoding", "UTF-8"); - properties.setProperty("resource.loader", "class"); - properties.setProperty("class.resource.loader.class", - "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); - // Stop logging to velocity.log and use standard logging - properties.setProperty("runtime.log.logsystem.class", "org.apache.velocity.runtime.log.SimpleLog4JLogSystem"); - properties.setProperty("runtime.log.logsystem.log4j.category", "velocity"); - properties.setProperty("runtime.log.logsystem.log4j.logger", "velocity"); - VelocityEngine velocityEngine = new VelocityEngine(properties); - return velocityEngine; - } - - @Bean - public JobCompletionMailSender jobCompletionMailSender() throws Exception { - JobCompletionMailSender sender = new JobCompletionMailSender(jobManager, jobStatusLogReader(), mailSender(), velocityEngine()); - sender.setTemplate("org/auscope/portal/server/web/service/monitor/templates/job-completion.tpl"); - sender.setDateFormat("EEE, d MMM yyyy HH:mm:ss"); - sender.setMaxLengthForSeriesNameInSubject(15); - sender.setMaxLengthForJobNameInSubject(15); - sender.setMaxLinesForTail(5); - sender.setEmailSender(portalAdminEmail); - sender.setEmailSubject("VGL Job (%s"); - sender.setPortalUrl(frontEndUrl); - return sender; - } - - @Bean(name="pylintCommand") - public List pylintCommand() { - List command = new ArrayList(); - command.add("pylint"); - command.add("-r"); - command.add("n"); - command.add("-f"); - command.add("json"); - command.add("--disable=R,C"); - return command; - } - - @Bean - public CloudStorageService[] cloudStorageServices() { - CloudStorageService[] storageServices = new CloudStorageService[2]; - storageServices[0] = cloudStorageServiceAwsSydney(); - storageServices[1] = cloudStorageServiceNci(); - return storageServices; - } - - @Bean - public ANVGLProvenanceService anvglProvenanceService() { - ANVGLProvenanceService provService = new ANVGLProvenanceService(anvglFileStagingService(), - cloudStorageServices(), promsUrl, promsReportingSystemUri); - return provService; - } - - @Bean - public CloudComputeService[] cloudComputeServices() { - ArrayList computeServicesList = new ArrayList(); - computeServicesList.add(cloudComputeServiceAws()); - computeServicesList.add(cloudComputeServiceNci()); - CloudComputeService computeServices[] = computeServicesList.toArray(new CloudComputeService[computeServicesList.size()]); - return computeServices; - } - - @Bean - public VGLJobStatusAndLogReader jobStatusLogReader() { - return new VGLJobStatusAndLogReader(jobManager, cloudStorageServices(), cloudComputeServices()); - } - @Bean public WFSGetFeatureMethodMaker methodMaker() { return new WFSGetFeatureMethodMaker(); } - @Bean - public VGLJobStatusChangeHandler vglJobStatusChangeHandler() throws Exception { - return new VGLJobStatusChangeHandler(jobManager, 
jobCompletionMailSender(), jobStatusLogReader(), anvglProvenanceService()); - } - @Bean public MSCLWFSService msclWfsService() { return new MSCLWFSService(httpServiceCallerApp(), methodMaker()); } - @Bean - public JobStatusMonitor jobStatusMonitor() throws Exception { - JobStatusChangeListener[] changeListeners = new JobStatusChangeListener[1]; - changeListeners[0] = vglJobStatusChangeHandler(); - return new JobStatusMonitor(jobStatusLogReader(), changeListeners); - } - @Bean public WFSService wfsService() { return new WFSService(httpServiceCallerApp(), methodMaker(), new GmlToHtml()); @@ -308,19 +152,6 @@ public WFSGml32Service wfsGml32Service() { ); } - @Bean - public JobDetailFactoryBean vglJobStatusMonitorDetail() throws Exception { - JobDetailFactoryBean jobDetail = new JobDetailFactoryBean(); - jobDetail.setJobClass(VGLJobStatusMonitor.class); - Map jobData = new HashMap(); - jobData.put("jobManager", jobManager); - jobData.put("jobStatusMonitor", jobStatusMonitor()); - jobData.put("jobUserService", userService); - jobData.put("nciDetailsService", nciDetailsService); - jobDetail.setJobDataAsMap(jobData); - return jobDetail; - } - /*** * Returns a factory to create jobs that update the OpenStack service status * for known layer services @@ -346,15 +177,6 @@ public static PortalPropertySourcesPlaceholderConfigurer propertyConfigurer() { return new PortalPropertySourcesPlaceholderConfigurer(); } - @Bean - public SimpleTriggerFactoryBean jobMonitorTriggerFactoryBean() throws Exception { - SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean(); - trigger.setJobDetail(vglJobStatusMonitorDetail().getObject()); - trigger.setRepeatInterval(300000); - trigger.setStartDelay(10000); - return trigger; - } - /*** * Returns a factory bean that create trigger for the known layer service status update job. The trigger can * be used in the Quartz scheduler. 
@@ -375,9 +197,8 @@ public SchedulerFactoryBean schedulerFactoryBean() throws Exception { SchedulerFactoryBean schedulerFactory = new SchedulerFactoryBean(); schedulerFactory.setTaskExecutor(taskExecutor()); - Trigger[] triggers = new Trigger[2]; - triggers[0] = jobMonitorTriggerFactoryBean().getObject(); - triggers[1] = knownLayerStatusCronTriggerFactoryBean().getObject(); + Trigger[] triggers = new Trigger[1]; + triggers[0] = knownLayerStatusCronTriggerFactoryBean().getObject(); schedulerFactory.setTriggers(triggers); // One off scheduler to get known layers X minutes after startup @@ -548,36 +369,11 @@ public VocabularyCacheService vocabularyCacheService() { return new VocabularyCacheService(taskExecutor(), vocabularyServiceList()); } - @Bean - public VGLCryptoService encryptionService() throws PortalServiceException { - return new VGLCryptoService(encryptionPassword); - } - @Bean public VocabularyFilterService vocabularyFilterService() { return new VocabularyFilterService(vocabularyCacheService()); } - @Bean - public CloudStorageServiceJClouds cloudStorageServiceAwsSydney() { - CloudStorageServiceJClouds storageService = new CloudStorageServiceJClouds(null, "aws-s3", awsAccessKey, awsSecretKey, awsSessionKey, "ap-southeast-2", false, true); - storageService.setName("Amazon Web Services - S3"); - storageService.setId("amazon-aws-storage-sydney"); - storageService.setBucket("vgl-csiro"); - storageService.setAdminEmail(portalAdminEmail); - STSRequirement req = STSRequirement.valueOf(awsStsRequirement); - storageService.setStsRequirement(req); - return storageService; - } - - @Bean - public CloudStorageServiceNci cloudStorageServiceNci() { - CloudStorageServiceNci cloudStorageService = new CloudStorageServiceNci("gadi.nci.org.au", "nci-gadi"); - cloudStorageService.setId("nci-gadi-storage"); - cloudStorageService.setName("National Computing Infrastructure - Gadi"); - return cloudStorageService; - } - @Lazy @Autowired private ViewKnownLayerFactory viewFactory; @@ -601,86 +397,21 @@ public InetAddress inetAddress() throws UnknownHostException { return InetAddress.getLocalHost(); } - @Bean - public VglMachineImage machineImageEscript() { - VglMachineImage machineImage = new VglMachineImage("ap-southeast-2/ami-0487de67"); - machineImage.setName("escript"); - machineImage.setDescription("A Debian (Jessie) machine with escript already installed."); - machineImage.setKeywords(new String[] {"escript", "debian"}); - return machineImage; - } - @Bean public OpendapService opendapService() { return new OpendapService(httpServiceCallerApp(), getDataMethodMaker()); } - @Bean - public VglMachineImage machineImageAemInversion() { - VglMachineImage machineImage = new VglMachineImage("ap-southeast-2/ami-736b3010"); - machineImage.setName("AEM-Inversion"); - machineImage.setDescription("A Debian (Jessie) machine with aem already installed."); - machineImage.setKeywords(new String[] {"AEM-Inversion", "debian"}); - return machineImage; - } - @Bean public SISSVoc2MethodMaker sissVocMethodMaker() { return new SISSVoc2MethodMaker(); } - @Bean - public MachineImage[] vglMachineImages() { - MachineImage[] machineImages = new MachineImage[2]; - machineImages[0] = machineImageEscript(); - machineImages[1] = machineImageAemInversion(); - return machineImages; - } - @Bean public ConceptFactory conceptFactory() { return new ConceptFactory(); } - @Bean - public CloudComputeServiceAws cloudComputeServiceAws() { - CloudComputeServiceAws computeService = new CloudComputeServiceAws("ec2.ap-southeast-2.amazonaws.com", - 
awsAccessKey, awsSecretKey, null, awsSessionKey); - computeService.setId("aws-ec2-compute"); - computeService.setName("Amazon Web Services - EC2"); - STSRequirement req = STSRequirement.valueOf(awsStsRequirement); - computeService.setStsRequirement(req); - computeService.setAvailableImages(vglMachineImages()); - return computeService; - } - - @Bean - public CloudComputeServiceNci cloudComputeServiceNci() { - CloudComputeServiceNci computeService = new CloudComputeServiceNci(cloudStorageServiceNci(), "gadi.nci.org.au"); - computeService.setId("nci-gadi-compute"); - computeService.setName("National Computing Infrastructure - Gadi"); - return computeService; - } - - @Bean - public StagingInformation stagingInformation() { - return new StagingInformation(stageInDirectory); - } - - @Bean - public ANVGLFileStagingService anvglFileStagingService() { - return new ANVGLFileStagingService(stagingInformation()); - } - - // Inject the configured solutions centre URL - @Bean - public MethodInvokingBean injectSsscUrl() { - MethodInvokingBean ssscUrlBean = new MethodInvokingBean(); - ssscUrlBean.setStaticMethod("org.auscope.portal.server.web.service.ScmEntryService.setSolutionsUrl"); - ssscUrlBean.setArguments(solutionsUrl); - return ssscUrlBean; - } - @Bean public StandardServletMultipartResolver multipartResolver() { return new StandardServletMultipartResolver(); @@ -711,7 +442,6 @@ public ServiceConfiguration serviceConfiguration() { return serviceConfiguration; } - @Value("${env.stackdriver.enable}") private boolean enableStackdriver; @Value("${env.stackdriver.private_key}") private String privateKey; @Value("${env.stackdriver.private_key_id}") private String privateKeyId; diff --git a/src/main/java/org/auscope/portal/server/vegl/VEGLJob.java b/src/main/java/org/auscope/portal/server/vegl/VEGLJob.java deleted file mode 100644 index 703beae4a..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VEGLJob.java +++ /dev/null @@ -1,754 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import jakarta.persistence.CascadeType; -import jakarta.persistence.CollectionTable; -import jakarta.persistence.Column; -import jakarta.persistence.ElementCollection; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.MapKey; -import jakarta.persistence.OneToMany; -import jakarta.persistence.Table; -import jakarta.persistence.Transient; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.server.vegl.VglParameter.ParameterType; - -/** - * A specialisation of a generic cloud job for the VEGL Portal - * - * A VEGL job is assumed to write all output to a specific cloud location - * @author Josh Vote - * - */ -@Entity -@Table(name = "jobs") -public class VEGLJob extends CloudJob implements Cloneable { - private static final long serialVersionUID = -57851899164623641L; - - @SuppressWarnings("unused") - @Transient - private final Log logger = LogFactory.getLog(this.getClass()); - - @Id - @GeneratedValue(strategy = 
GenerationType.IDENTITY) - private Integer id; - private String registeredUrl; - private Integer seriesId; - private boolean emailNotification; - private String processTimeLog; - private String storageBucket; - private String promsReportUrl; - private String computeVmRunCommand; - - /** - * max walltime for the job. 0 or null indicate that no walltime applies to the job - */ - private Integer walltime; - private boolean containsPersistentVolumes; - - /** Time when the job executes as opposed to when the job was submitted **/ - private Date executeDate; - - /** A map of VglParameter objects keyed by their parameter names*/ - @OneToMany(mappedBy = "parent", fetch=FetchType.EAGER, cascade=CascadeType.ALL, orphanRemoval=true) - @MapKey(name="name") - private Map jobParameters; - - /** A list of VglDownload objects associated with this job*/ - @OneToMany(mappedBy="parent", fetch=FetchType.EAGER, cascade=CascadeType.ALL, orphanRemoval=true) - private List jobDownloads; - - /** A list of FileInformation objects associated with this job*/ - /* - private List jobFiles = new ArrayList<>(); - */ - - /** A set of Solutions associated with this job */ - @ElementCollection - @CollectionTable(name="job_solutions", joinColumns=@JoinColumn(name="job_id")) - @Column(name="solution_id") - private Set jobSolutions; - - /** - * A set of annotations associated with this job. - */ - @ElementCollection(fetch=FetchType.EAGER) - @CollectionTable(name="job_annotations", joinColumns=@JoinColumn(name="job_id")) - @Column(name="value") - private Set annotations; - - /* - * CloudJob parameters - */ - /** Descriptive name of this job */ - protected String name; - /** Long description of this job */ - protected String description; - /** Email address of job submitter */ - protected String emailAddress; - /** user name of job submitter */ - protected String user; - /** date/time when this job was submitted */ - protected Date submitDate; - /** date/time when this job was processed */ - protected Date processDate; - /** descriptive status of this job */ - protected String status; - - /** the ID of the VM that will be used to run this job */ - protected String computeVmId; - /** the ID of the VM instance that is running this job (will be null if no job is currently running) */ - protected String computeInstanceId; - /** The type of the compute instance to start (size of memory, number of CPUs etc) - eg m1.large. Can be null */ - protected String computeInstanceType; - /** The name of the key to inject into the instance at startup for root access. 
Can be null */ - protected String computeInstanceKey; - /** The unique ID of the storage service this job has been using */ - protected String computeServiceId; - - /** The key prefix for all files associated with this job in the specified storage bucket */ - protected String storageBaseKey; - /** The unique ID of the storage service this job has been using */ - protected String storageServiceId; - - transient protected Map properties = new HashMap(); - - - - - public boolean isContainsPersistentVolumes() { - return containsPersistentVolumes; - } - - - public void setContainsPersistentVolumes(boolean containsPersistentVolumes) { - this.containsPersistentVolumes = containsPersistentVolumes; - } - - /** - * Creates an unitialised VEGLJob - */ - public VEGLJob() { - super(); - } - - - /** - * - */ - public Integer getId() { - return id; - } - - - /** - * - */ - public void setId(Integer id) { - this.id = id; - } - - - /** - * Sets the processTimeLog - * @param String time - */ - public void setProcessTimeLog(String processTimeLog) { - this.processTimeLog=processTimeLog; - - } - - /** - * @return the processTimeLog - */ - public String getProcessTimeLog() { - return processTimeLog; - } - - /** - * Gets where this job has been registered - * @return - */ - public String getRegisteredUrl() { - return registeredUrl; - } - - /** - * Sets where this job has been registered - * @param registeredUrl - */ - public void setRegisteredUrl(String registeredUrl) { - this.registeredUrl = registeredUrl; - } - - /** - * Gets the ID of the series this job belongs to - * @return - */ - public Integer getSeriesId() { - return seriesId; - } - - /** - * Sets the ID of the series this job belongs to - * @param seriesId - */ - public void setSeriesId(Integer seriesId) { - this.seriesId = seriesId; - } - - /** - * Gets the email notification flag for this job - * @return - */ - public boolean getEmailNotification() { - return emailNotification; - } - - /** - * Sets the email notification flag for this job - * @param seriesId - */ - public void setEmailNotification(boolean emailNotification) { - this.emailNotification = emailNotification; - } - - /** - * A set of VglJobParameter objects - * @return - */ - public Map getJobParameters() { - return (jobParameters != null) ? 
jobParameters : new HashMap(); - } - - /** A set of VglJobParameter objects*/ - public void setJobParameters(Map jobParameters) { - if(this.jobParameters == null) { - this.jobParameters = jobParameters; - } else { - this.jobParameters.clear(); - if(jobParameters != null) { - for (String key : jobParameters.keySet()) { - jobParameters.get(key).setParent(this); - this.jobParameters.put(key, jobParameters.get(key)); - } - } - } - } - - /** - * Sets a single parameter within this job - * @param name The name of the parameter (parameters with the same name will be overwritten) - * @param value The value of the parameter - * @param type The type of the parameter ('number' or 'string') - */ - public void setJobParameter(String name, String value, ParameterType type) { - VglParameter param = jobParameters.get(name); - if (param == null) { - param = new VglParameter(); - } - - param.setParent(this); - param.setName(name); - param.setValue(value); - param.setType(type.name()); - - jobParameters.put(name, param); - } - - /** - * Gets the VglParameter with a particular name - * @param key - * @return - */ - public VglParameter getJobParameter(String key) { - return this.jobParameters.get(key); - } - - - /** - * A list of VglDownload objects associated with this job - * @return - */ - public List getJobDownloads() { - return (jobDownloads != null) ? jobDownloads : new ArrayList(); - } - - /** - * A list of VglDownload objects associated with this job - * @param jobDownloads - */ - public void setJobDownloads(List jobDownloads) { - if (this.jobDownloads == null) { - this.jobDownloads = new ArrayList(); - } - this.jobDownloads.clear(); - if (jobDownloads != null) { - for (VglDownload dl : jobDownloads) { - dl.setParent(this); - this.jobDownloads.add(dl); - } - } - } - - public Set getJobSolutions() { - return (jobSolutions != null) ? jobSolutions : new HashSet(); - } - - public void addJobSolution(String solutionId) { - this.jobSolutions.add(solutionId); - } - - public void setJobSolutions(Set solutions) { - if(this.jobSolutions == null) { - this.jobSolutions = solutions; - } else { - this.jobSolutions.clear(); - if(solutions != null) { - this.jobSolutions.addAll(solutions); - } - } - } - - public Set getAnnotations() { - return (annotations != null) ? annotations : new HashSet(); - } - - public void setAnnotations(Collection annotations) { - if (this.annotations == null) { - this.annotations = new HashSet(); - } else { - this.annotations.clear(); - } - - this.annotations.addAll(annotations); - } - - /** - * Similar to clone but ensures compatibility with hibernate. No IDs or references (except for immutable ones) - * will be shared by the clone and this object. 
- * @return - */ - public VEGLJob safeClone() { - VEGLJob newJob = new VEGLJob(); - newJob.setComputeInstanceId(this.getComputeInstanceId()); - newJob.setComputeInstanceKey(this.getComputeInstanceKey()); - newJob.setComputeInstanceType(this.getComputeInstanceType()); - newJob.setComputeServiceId(this.getComputeServiceId()); - newJob.setComputeVmId(this.getComputeVmId()); - newJob.setComputeVmRunCommand(this.getComputeVmRunCommand()); - newJob.setDescription(this.getDescription()); - newJob.setEmailAddress(this.getEmailAddress()); - newJob.setName(this.getName()); - newJob.setRegisteredUrl(this.getRegisteredUrl()); - newJob.setSeriesId(this.getSeriesId()); - newJob.setStatus(this.getStatus()); //change the status - newJob.setStorageServiceId(this.getStorageServiceId()); - newJob.setStorageBaseKey(this.getStorageBaseKey()); - newJob.setSubmitDate(this.getSubmitDate()); //this job isn't submitted yet - newJob.setUser(this.getUser()); - newJob.setStorageBucket(this.getStorageBucket()); - newJob.setWalltime(this.getWalltime()); - newJob.setExecuteDate(this.getExecuteDate()); - newJob.setPromsReportUrl(this.getPromsReportUrl()); - newJob.setContainsPersistentVolumes(this.isContainsPersistentVolumes()); - - List newDownloads = new ArrayList<>(); - for (VglDownload dl : this.getJobDownloads()) { - VglDownload dlClone = (VglDownload) dl.clone(); - dlClone.setId(null); - newDownloads.add(dlClone); - } - newJob.setJobDownloads(newDownloads); - - Map newParams = new HashMap<>(); - if(this.jobParameters != null) { - for (String key : this.jobParameters.keySet()) { - VglParameter paramClone = (VglParameter)this.jobParameters.get(key).clone(); - paramClone.setId(null); - newParams.put(key, paramClone); - } - } - newJob.setJobParameters(newParams); - - for (String key : properties.keySet()) { - newJob.setProperty(key, getProperty(key)); - } - - newJob.setJobSolutions(new HashSet<>(this.getJobSolutions())); - - return newJob; - } - - /** - * The storage bucket name that will receive job artifacts (usually unique to user) - */ - @Override - public String getStorageBucket() { - return storageBucket; - } - - /** - * The storage bucket name that will receive job artifacts (usually unique to user) - * @param storageBucket - */ - public void setStorageBucket(String storageBucket) { - this.storageBucket = storageBucket; - } - - /** - * The walltime in minutes. - * @return Walltime in minutes or null if no walltime is set. - */ - public Integer getWalltime() { - return walltime; - } - - public boolean isWalltimeSet() { - return getWalltime()!=null && getWalltime()>0; - } - - /** - * Set the walltime in minutes - * @param walltime - */ - public void setWalltime(Integer walltime) { - this.walltime = walltime; - } - - /** - * @return The date of job execution - */ - public Date getExecuteDate() { - return executeDate; - } - - public void setExecuteDate(Date executeDate) { - this.executeDate = executeDate; - } - - /** - * @return The URL of the associated PROMS Report - */ - public String getPromsReportUrl() { - return promsReportUrl; - } - - public void setPromsReportUrl(String promsReportUrl) { - this.promsReportUrl = promsReportUrl; - } - - /** - * The command that will be used to run the python run script. If null, most providers will use 'python' - * @return - */ - public String getComputeVmRunCommand() { - return computeVmRunCommand; - } - - /** - * The command that will be used to run the python run script. 
If null, most providers will use 'python' - * @param computeVmRunCommand - */ - public void setComputeVmRunCommand(String computeVmRunCommand) { - this.computeVmRunCommand = computeVmRunCommand; - } - - @Override - public String toString() { - return "VEGLJob [registeredUrl=" + registeredUrl + ", seriesId=" - + seriesId + ", id=" + id + ", name=" + name + ", description=" - + description + "]"; - } - - public String setProperty(String key, String value) { - if (value == null) { - String oldValue = properties.get(key); - properties.remove(key); - return oldValue; - } - return properties.put(key, value); - } - - @Override - public String getProperty(String key) { - return properties.get(key); - } - - /** - * Descriptive name of this job - * - * @return - */ - public String getName() { - return name; - } - - /** - * Descriptive name of this job - * - * @param name - */ - public void setName(String name) { - this.name = name; - } - - /** - * Long description of this job - * - * @return - */ - public String getDescription() { - return description; - } - - /** - * Long description of this job - * - * @param description - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * Email address of job submitter - * - * @return - */ - public String getEmailAddress() { - return emailAddress; - } - - /** - * Email address of job submitter - * - * @param emailAddress - */ - public void setEmailAddress(String emailAddress) { - this.emailAddress = emailAddress; - } - - /** - * user name of job submitter - * - * @return - */ - @Override - public String getUser() { - return user; - } - - /** - * user name of job submitter - * - * @param user - */ - public void setUser(String user) { - this.user = user; - } - - /** - * date/time when this job was submitted - * - * @return - */ - public Date getSubmitDate() { - return submitDate; - } - - /** - * date/time when this job was submitted - * - * @param submitDate - */ - public void setSubmitDate(Date submitDate) { - this.submitDate = submitDate; - } - - /** - * date/time when this job was processed - * - * @return - */ - public Date getProcessDate() { - return processDate; - } - - /** - * date/time when this job was processed - * - * @param processDate - */ - public void setProcessDate(Date processDate) { - this.processDate = processDate; - } - - /** - * date/time when this job was submitted (expects a date in the format CloudJob.DATE_FORMAT) - * - * @param submitDate - * @throws ParseException - */ - public void setSubmitDate(String submitDate) throws ParseException { - SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); - this.setSubmitDate(sdf.parse(submitDate)); - } - - /** - * descriptive status of this job - * - * @return - */ - public String getStatus() { - return status; - } - - /** - * descriptive status of this job - * - * @param status - */ - public void setStatus(String status) { - this.status = status; - } - - /** - * the ID of the VM that will be used to run this job - * - * @return - */ - public String getComputeVmId() { - return computeVmId; - } - - /** - * the ID of the VM that will be used to run this job - * - * @param computeVmId - */ - public void setComputeVmId(String computeVmId) { - this.computeVmId = computeVmId; - } - - /** - * the ID of the VM instance that is running this job (will be null if no job is currently running) - * - * @return - */ - public String getComputeInstanceId() { - return computeInstanceId; - } - - /** - * the ID of the VM instance that is running this job (will be 
null if no job is currently running) - * - * @param computeInstanceId - */ - public void setComputeInstanceId(String computeInstanceId) { - this.computeInstanceId = computeInstanceId; - } - - /** - * The type of the compute instance to start (size of memory, number of CPUs etc) - eg m1.large. Can be null - */ - public String getComputeInstanceType() { - return computeInstanceType; - } - - /** - * The type of the compute instance to start (size of memory, number of CPUs etc) - eg m1.large. Can be null - */ - public void setComputeInstanceType(String computeInstanceType) { - this.computeInstanceType = computeInstanceType; - } - - /** - * The name of the key to inject into the instance at startup for root access. Can be null - */ - public String getComputeInstanceKey() { - return computeInstanceKey; - } - - /** - * The name of the key to inject into the instance at startup for root access. Can be null - */ - public void setComputeInstanceKey(String computeInstanceKey) { - this.computeInstanceKey = computeInstanceKey; - } - - /** - * The unique ID of the compute service this job has been using - * - * @return - */ - public String getComputeServiceId() { - return computeServiceId; - } - - /** - * The unique ID of the compute service this job has been using - * - * @param computeServiceId - */ - public void setComputeServiceId(String computeServiceId) { - this.computeServiceId = computeServiceId; - } - - /** - * The unique ID of the storage service this job has been using - * - * @return - */ - public String getStorageServiceId() { - return storageServiceId; - } - - /** - * The unique ID of the storage service this job has been using - * - * @param storageServiceId - */ - public void setStorageServiceId(String storageServiceId) { - this.storageServiceId = storageServiceId; - } - - /** - * The key prefix for all files associated with this job in the specified storage bucket - * - * @return - */ - @Override - public String getStorageBaseKey() { - return storageBaseKey; - } - - /** - * The key prefix for all files associated with this job in the specified storage bucket - * - * @param storageBaseKey - */ - @Override - public void setStorageBaseKey(String storageBaseKey) { - this.storageBaseKey = storageBaseKey; - } - -} diff --git a/src/main/java/org/auscope/portal/server/vegl/VEGLJobManager.java b/src/main/java/org/auscope/portal/server/vegl/VEGLJobManager.java deleted file mode 100644 index 30b72352f..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VEGLJobManager.java +++ /dev/null @@ -1,201 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.util.Date; -import java.util.List; - -import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.auscope.portal.server.web.service.VEGLJobService; -import org.auscope.portal.server.web.service.VEGLSeriesService; -import org.auscope.portal.server.web.service.VGLJobAuditLogService; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -/** - * Class that talks to the data objects to retrieve or save data - * - * @author Cihan Altinay - * @author Josh Vote - * @author Richard Goh - */ -@Component -public class VEGLJobManager { - 
protected final Log logger = LogFactory.getLog(getClass()); - - @Autowired - private VEGLJobService jobService; - - @Autowired - private VEGLSeriesService seriesService; - - @Autowired - private VGLJobAuditLogService jobAuditLogService; - - @Autowired - private NCIDetailsService nciDetailsService; - - public List querySeries(String user, String name, String desc) { - return seriesService.query(user, name, desc); - } - - public List getSeriesJobs(int seriesId, PortalUser user) throws PortalServiceException { - List jobs = jobService.getJobsOfSeries(seriesId, user); - return applyNCIDetails(jobs, user); - } - - public List getUserJobs(PortalUser user) throws PortalServiceException { - List jobs = jobService.getJobsOfUser(user); - return applyNCIDetails(jobs, user); - } - - public List getPendingOrActiveJobs() { - return jobService.getPendingOrActiveJobs(); - } - - public List getInQueueJobs() { - return jobService.getInQueueJobs(); - } - - public VEGLJob getJobById(int jobId, PortalUser user) throws PortalServiceException { - return applyNCIDetails(jobService.get(jobId, user), user); - } - - public VEGLJob getJobById(int jobId, String stsArn, String clientSecret, String s3Role, String userEmail, String nciUser, String nciProj, String nciKey) { - return jobService.get(jobId, stsArn, clientSecret, s3Role, userEmail, nciUser, nciProj, nciKey); - } - - public void deleteJob(VEGLJob job) { - jobService.deleteJob(job); - } - - public VEGLSeries getSeriesById(int seriesId, String userEmail) { - return seriesService.get(seriesId, userEmail); - } - - public void saveJob(VEGLJob veglJob) { - jobService.saveJob(veglJob); - } - - /** - * Create the job life cycle audit trail. If the creation is unsuccessful, it - * will silently fail and log the failure message to error log. - * @param oldJobStatus - * @param curJob - * @param message - */ - public void createJobAuditTrail(String oldJobStatus, VEGLJob curJob, String message) { - VGLJobAuditLog vglJobAuditLog = null; - try { - vglJobAuditLog = new VGLJobAuditLog(); - vglJobAuditLog.setJobId(curJob.getId()); - vglJobAuditLog.setFromStatus(oldJobStatus); - vglJobAuditLog.setToStatus(curJob.getStatus()); - vglJobAuditLog.setTransitionDate(new Date()); - vglJobAuditLog.setMessage(message); - - // Failure in the creation of the job life cycle audit trail is - // not critical hence we allow it to fail silently and log it. - jobAuditLogService.save(vglJobAuditLog); - } catch (Exception ex) { - logger.warn("Error creating audit trail for job: " + vglJobAuditLog, ex); - } - } - - /** - * Create the job life cycle audit trail. If the creation is unsuccessful, it - * will silently fail and log the failure message to error log. - * @param oldJobStatus - * @param curJob - * @param message - */ - public void createJobAuditTrail(String oldJobStatus, VEGLJob curJob, Throwable exception) { - String message = ExceptionUtils.getStackTrace(exception); - if(message.length() > 1000){ - message = message.substring(0,1000); - } - VGLJobAuditLog vglJobAuditLog = null; - try { - vglJobAuditLog = new VGLJobAuditLog(); - vglJobAuditLog.setJobId(curJob.getId()); - vglJobAuditLog.setFromStatus(oldJobStatus); - vglJobAuditLog.setToStatus(curJob.getStatus()); - vglJobAuditLog.setTransitionDate(new Date()); - vglJobAuditLog.setMessage(message); - - // Failure in the creation of the job life cycle audit trail is - // not critical hence we allow it to fail silently and log it. 
- jobAuditLogService.save(vglJobAuditLog); - } catch (Exception ex) { - logger.warn("Error creating audit trail for job: " + vglJobAuditLog, ex); - } - } - - public void deleteSeries(VEGLSeries series) { - seriesService.delete(series); - } - - public void saveSeries(VEGLSeries series) { - seriesService.save(series); - } - - // These are solely for tests - public void setVeglJobService(VEGLJobService jobService) { - this.jobService = jobService; - } - - public void setVeglSeriesService(VEGLSeriesService seriesService) { - this.seriesService = seriesService; - } - - public void setVglJobAuditLogService(VGLJobAuditLogService jobAuditLogService) { - this.jobAuditLogService = jobAuditLogService; - } - - /* - public NCIDetailsService getNciDetailsService() { - return nciDetailsService; - } - */ - - public void setNciDetailsService(NCIDetailsService nciDetailsService) { - this.nciDetailsService = nciDetailsService; - } - - private VEGLJob applyNCIDetails(VEGLJob job, NCIDetails nciDetails) { - if (nciDetails != null) { - try { - nciDetails.applyToJobProperties(job); - } catch (Exception e) { - logger.error("Unable to apply nci details to job:", e); - throw new RuntimeException("Unable to decrypt NCI Details", e); - } - } - - return job; - } - - private VEGLJob applyNCIDetails(VEGLJob job, PortalUser user) throws PortalServiceException { - if (job == null) { - return null; - } - return applyNCIDetails(job, nciDetailsService.getByUser(user)); - } - - private List applyNCIDetails(List jobs, PortalUser user) throws PortalServiceException { - NCIDetails nciDetails = nciDetailsService.getByUser(user); - - if (nciDetails != null) { - for (VEGLJob job: jobs) { - applyNCIDetails(job, nciDetails); - } - } - - return jobs; - } - -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/vegl/VEGLSeries.java b/src/main/java/org/auscope/portal/server/vegl/VEGLSeries.java deleted file mode 100644 index e2e875900..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VEGLSeries.java +++ /dev/null @@ -1,135 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; - -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.Table; - -/** - * Simple class that stores information about a job series consisting of - * one or more jobs. - * - * Developed from the original GeodesySeries - * - * @author Cihan Altinay - * @Author Josh Vote - */ -@Entity -@Table(name = "series") -public class VEGLSeries implements Serializable { - - private static final long serialVersionUID = -4483263063748119882L; - - /** A unique identifier for this series */ - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - /** The user owning this series */ - //@OneToMany(mappedBy = "seriesId", fetch=FetchType.EAGER, orphanRemoval = true) - private String user; - /** A short name for this series */ - private String name; - /** A description of this series */ - private String description; - - /** - * Default constructor. - */ - public VEGLSeries() { - user = name = description = ""; - } - - /** - * Returns the unique identifier of this series. - * - * @return The unique ID of this series. - */ - public Integer getId() { - return id; - } - - /** - * Sets the unique identifier of this series. - * - * @param id The new ID for this series. 
- */ - protected void setId(Integer id) { - assert (id != null); - this.id = id; - } - - /** - * Returns the description of this series. - * - * @return The description of this series. - */ - public String getDescription() { - return description; - } - - /** - * Sets the description of this series. - * - * @param description The description of this series. - */ - public void setDescription(String description) { - assert (description != null); - this.description = description; - } - - /** - * Returns the user owning this series. - * - * @return The user owning this series. - */ - public String getUser() { - return user; - } - - /** - * Sets the user owning this series. - * - * @param user The user owning this series. - */ - public void setUser(String user) { - assert (user != null); - this.user = user; - } - - /** - * Returns the name of this series. - * - * @return The name of this series. - */ - public String getName() { - return name; - } - - /** - * Sets the name of this series. - * - * @param name The name of this series. - */ - public void setName(String name) { - assert (name != null); - this.name = name; - } - - /** - * Returns a String representing the state of this GeodesySeries - * object. - * - * @return A summary of the values of this object's fields - */ - @Override - public String toString() { - return super.toString() + - ",id=" + id + - ",user=\"" + user + "\"" + - ",name=\"" + name + "\"" + - ",description=\"" + description + "\""; - } -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/vegl/VGLDataPurchase.java b/src/main/java/org/auscope/portal/server/vegl/VGLDataPurchase.java deleted file mode 100644 index 7721c235f..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VGLDataPurchase.java +++ /dev/null @@ -1,244 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; -import java.util.Date; - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.Table; - -import org.auscope.portal.server.web.security.PortalUser; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -/** - * Simple class that stores user data purchase information - * @author rob508 - * - */ -@Entity -@Table(name = "data_purchases") -public class VGLDataPurchase implements Serializable { - - private static final long serialVersionUID = 1L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - @Column(nullable=false) - private Date date; - - @Column(nullable=false) - private Float amount; - - @Column(nullable=false) - private String downloadUrl; - - @Column(nullable=false) - private String cswRecord; - - @Column(nullable=false) - private String onlineResourceType; - - @Column(nullable=false) - private String url; - - @Column(nullable=false) - private String localPath; - - @Column(nullable=false) - private String name; - - @Column(nullable=false) - private String description; - - @Column(nullable=false) - private Double northBoundLatitude; - - @Column(nullable=false) - private Double southBoundLatitude; - - @Column(nullable=false) - private Double eastBoundLongitude; - - @Column(nullable=false) - private Double westBoundLongitude; - - @Column(nullable=false) - private String paymentRecord; - - /** The user who made the purchase 
*/ - @JsonIgnore - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "userId") - private PortalUser parent; - - public VGLDataPurchase() { - super(); - } - - public VGLDataPurchase(Date date, Float amount, String downloadUrl, String cswRecord, String onlineResourceType, String url, String localPath, String name, String description, - Double northBoundLatitude, Double southBoundLatitude, Double eastBoundLongitude, Double westBoundLongitude, String paymentRecord, - PortalUser user) { - super(); - this.date = date; - this.amount = amount; - this.downloadUrl = downloadUrl; - this.cswRecord = cswRecord; - this.onlineResourceType = onlineResourceType; - this.url = url; - this.localPath = localPath; - this.name = name; - this.description = description; - this.northBoundLatitude = northBoundLatitude; - this.southBoundLatitude = southBoundLatitude; - this.eastBoundLongitude = eastBoundLongitude; - this.westBoundLongitude = westBoundLongitude; - this.paymentRecord = paymentRecord; - this.parent = user; - } - - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - public Date getDate() { - return date; - } - - public void setDate(Date date) { - this.date = date; - } - - public String getDownloadUrl() { - return downloadUrl; - } - - public void setDownloadUrl(String downloadUrl) { - this.downloadUrl = downloadUrl; - } - - public Float getAmount() { - return amount; - } - - public void setAmount(Float amount) { - this.amount = amount; - } - - public String getCswRecord() { - return cswRecord; - } - - public void setCswRecord(String cswRecord) { - this.cswRecord = cswRecord; - } - - public String getOnlineResourceType() { - return onlineResourceType; - } - - public void setOnlineResourceType(String onlineResourceType) { - this.onlineResourceType = onlineResourceType; - } - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public String getLocalPath() { - return localPath; - } - - public void setLocalPath(String localPath) { - this.localPath = localPath; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public Double getNorthBoundLatitude() { - return northBoundLatitude; - } - - public void setNorthBoundLatitude(Double northBoundLatitude) { - this.northBoundLatitude = northBoundLatitude; - } - - public Double getSouthBoundLatitude() { - return southBoundLatitude; - } - - public void setSouthBoundLatitude(Double southBoundLatitude) { - this.southBoundLatitude = southBoundLatitude; - } - - public Double getEastBoundLongitude() { - return eastBoundLongitude; - } - - public void setEastBoundLongitude(Double eastBoundLongitude) { - this.eastBoundLongitude = eastBoundLongitude; - } - - public Double getWestBoundLongitude() { - return westBoundLongitude; - } - - public void setWestBoundLongitude(Double westBoundLongitude) { - this.westBoundLongitude = westBoundLongitude; - } - - public String getPaymentRecord() { - return this.paymentRecord; - } - - public void setPaymentRecord(String paymentRecord) { - this.paymentRecord = paymentRecord; - } - - /** - * User that made the purchases - * @return - */ - public PortalUser getParent() { - return parent; - } - - /** - * User that made the purchases - * @param parent - */ - public void setParent(PortalUser parent) { - this.parent = 
parent; - } - -} diff --git a/src/main/java/org/auscope/portal/server/vegl/VGLJobAuditLog.java b/src/main/java/org/auscope/portal/server/vegl/VGLJobAuditLog.java deleted file mode 100644 index ba930a5b3..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VGLJobAuditLog.java +++ /dev/null @@ -1,132 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; -import java.util.Date; - -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.Table; - -/** - * A simple POJO class that stores a job life cycle transition - * information for auditing purposes. - * - * @author Richard Goh - */ -@Entity -@Table(name = "jobs_audit_log") -public class VGLJobAuditLog implements Serializable { - - /** - * Generated on 2012-09-24 - */ - private static final long serialVersionUID = -1762982566490775865L; - - /** The primary key for this parameter*/ - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - /** The id of the job that owns this parameter*/ - private Integer jobId; - /** The descriptive status of the job before its status change */ - private String fromStatus; - /** The descriptive status of the job after its status change */ - private String toStatus; - /** The date & time when this job changed its status*/ - private Date transitionDate; - /** An optional job transition audit log */ - private String message; - - - /** - * Default constructor. - */ - public VGLJobAuditLog() { - } - - /** - * @return the id - */ - public Integer getId() { - return id; - } - - /** - * @param id the id to set - */ - public void setId(Integer id) { - this.id = id; - } - - /** - * @return the jobId - */ - public Integer getJobId() { - return jobId; - } - - /** - * @param jobId the jobId to set - */ - public void setJobId(Integer jobId) { - this.jobId = jobId; - } - - /** - * @return the fromStatus - */ - public String getFromStatus() { - return fromStatus; - } - - /** - * @param fromStatus the fromStatus to set - */ - public void setFromStatus(String fromStatus) { - this.fromStatus = fromStatus; - } - - /** - * @return the toStatus - */ - public String getToStatus() { - return toStatus; - } - - /** - * @param toStatus the toStatus to set - */ - public void setToStatus(String toStatus) { - this.toStatus = toStatus; - } - - /** - * @return the transitionDate - */ - public Date getTransitionDate() { - return transitionDate; - } - - /** - * @param transitionDate the transitionDate to set - */ - public void setTransitionDate(Date transitionDate) { - this.transitionDate = transitionDate; - } - - /** - * @return the message - */ - public String getMessage() { - return message; - } - - /** - * @param message the message to set - */ - public void setMessage(String message) { - this.message = message; - } -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/vegl/VGLJobPurchase.java b/src/main/java/org/auscope/portal/server/vegl/VGLJobPurchase.java deleted file mode 100644 index dc6a46734..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VGLJobPurchase.java +++ /dev/null @@ -1,135 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; -import java.util.Date; - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import 
jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.Table; - -import org.auscope.portal.server.web.security.PortalUser; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -/** - * Simple class that stores user job purchase information - * @author rob508 - * - */ -@Entity -@Table(name = "job_purchases") -public class VGLJobPurchase implements Serializable { - - private static final long serialVersionUID = 1L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - @Column(nullable=false) - private Date date; - - @Column(nullable=false) - private Float amount; - - @Column(nullable=false) - private Integer jobId; - - @Column(nullable=false) - private String jobName; - - @Column(nullable=false) - private String paymentRecord; - - /** The user who made the purchase */ - @JsonIgnore - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "userId") - private PortalUser parent; - - public VGLJobPurchase() { - super(); - } - - public VGLJobPurchase(Date date, Float amount, Integer jobId, String jobName, - String paymentRecord, PortalUser user) { - super(); - this.date = date; - this.amount = amount; - this.jobId = jobId; - this.jobName = jobName; - this.paymentRecord = paymentRecord; - this.parent = user; - } - - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - public Date getDate() { - return date; - } - - public void setDate(Date date) { - this.date = date; - } - - public Float getAmount() { - return amount; - } - - public void setAmount(Float amount) { - this.amount = amount; - } - - public Integer getJobId() { - return jobId; - } - - public void setJobId(Integer jobId) { - this.jobId = jobId; - } - - public String getJobName() { - return jobName; - } - - public void setJobName(String jobName) { - this.jobName = jobName; - } - - public String getPaymentRecord() { - return this.paymentRecord; - } - - public void setPaymentRecord(String paymentRecord) { - this.paymentRecord = paymentRecord; - } - - /** - * User that made the purchases - * @return - */ - public PortalUser getParent() { - return parent; - } - - /** - * User that made the purchases - * @param parent - */ - public void setParent(PortalUser parent) { - this.parent = parent; - } - -} diff --git a/src/main/java/org/auscope/portal/server/vegl/VGLJobStatusAndLogReader.java b/src/main/java/org/auscope/portal/server/vegl/VGLJobStatusAndLogReader.java deleted file mode 100644 index 02257f315..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VGLJobStatusAndLogReader.java +++ /dev/null @@ -1,296 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.Date; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.apache.commons.io.IOUtils; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.services.cloud.monitor.JobStatusReader; -import org.auscope.portal.core.util.FileIOUtil; -import org.auscope.portal.server.web.controllers.BaseCloudController; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import 
org.auscope.portal.server.web.controllers.JobListController; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.CloudSubmissionService; -import org.springframework.ui.ModelMap; - -public class VGLJobStatusAndLogReader extends BaseCloudController implements JobStatusReader { - - - private CloudSubmissionService cloudSubmissionService; - - public VGLJobStatusAndLogReader() { - super(null, null, null); - } - - public VGLJobStatusAndLogReader(VEGLJobManager jobManager, - CloudStorageService[] cloudStorageServices, - CloudComputeService[] cloudComputeServices) { - super(cloudStorageServices, cloudComputeServices, jobManager); - } - - public CloudSubmissionService getCloudSubmissionService() { - return cloudSubmissionService; - } - - public void setCloudSubmissionService(CloudSubmissionService cloudSubmissionService) { - this.cloudSubmissionService = cloudSubmissionService; - } - - /** - * Gets a pre parsed version of the internal logs. The resulting object will - * contain the logs sectioned into 'named sections' e.g.: Section for python code, - * section for environment etc. - * - * Will always contain a single section called "Full" containing the un-sectioned - * original log. - * - * @param job - * @return - */ - public ModelMap getSectionedLogs(VEGLJob job) throws PortalServiceException { - return getSectionedLogs(job, JobListController.VL_LOG_FILE); - } - - /** - * Gets a pre parsed version of the specified log file. The resulting object will - * contain the logs sectioned into 'named sections' e.g.: Section for python code, - * section for environment etc. - * - * Will always contain a single section called "Full" containing the un-sectioned - * original log. - * - * @param job - * @return - */ - public ModelMap getSectionedLogs(VEGLJob job, String logFile) throws PortalServiceException { - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - throw new PortalServiceException( - "The specified job doesn't have a storage service.", - "Please ensure you have chosen a storage provider for the job."); - } - - //Download the logs from cloud storage - String logContents = null; - InputStream is = null; - try { - is = cloudStorageService.getJobFile(job, logFile); - logContents = IOUtils.toString(is, StandardCharsets.UTF_8); - } catch (Exception ex) { - log.debug(String.format("The job %1$s hasn't uploaded %2$s yet", job.getId(), logFile)); - } finally { - FileIOUtil.closeQuietly(is); - } - - //If we fail at that, download direct from the running instance - if (logContents == null) { - CloudComputeService compute = getComputeService(job); - if (compute == null) { - throw new PortalServiceException( - "The specified job doesn't have a compute service.", - "Please ensure you have chosen a compute provider for the job."); - } - - logContents = compute.getConsoleLog(job); - if (logContents == null) { - throw new PortalServiceException("The specified job hasn't uploaded any logs yet"); - } - } - - ModelMap namedSections = new ModelMap(); - namedSections.put("Full", logContents); //always include the full log - - //Iterate through looking for start/end matches. 
All text between a start/end - //tag will be snipped out and used in their own region/section - Pattern p = Pattern.compile("^#### (.*) (.+) ####$[\\n\\r]*", Pattern.MULTILINE); - Matcher m = p.matcher(logContents); - int start = 0; - String currentSectionName = null; - while (m.find()) { - String sectionName = m.group(1); - String delimiter = m.group(2); - - //On a new match - record the location and name - if (delimiter.equals("start")) { - start = m.end(); - currentSectionName = sectionName; - } else if (delimiter.equals("end")) { - //On a closing pattern - ensure we are closing the current region (we don't support nesting) - //Take the snippet of text and store it in our result map - if (sectionName.equals(currentSectionName)) { - String regionText = logContents.substring(start, m.start()); - namedSections.put(sectionName, regionText); - currentSectionName = null; - start = 0; - } - } - } - - //We have an unfinished section... let's include it anyway - if (currentSectionName != null) { - String regionText = logContents.substring(start); - namedSections.put(currentSectionName, regionText); - } - - return namedSections; - } - - /** - * - * @param job - * @param sectionName - * @return null if it doesn't have any log - */ - public String getSectionedLog(VEGLJob job, String sectionName) { - try { - ModelMap sectLogs = getSectionedLogs(job); - return (String)sectLogs.get(sectionName); - } catch (PortalServiceException ex) { - log.debug(ex.getMessage()); - return null; - } - } - - /** - * Using the services internal to the class, determine the current status of this job. Service failure - * will return the underlying job status - */ - @Override - public String getJobStatus(CloudJob cloudJob) { - - String stsArn = cloudJob.getProperty(CloudJob.PROPERTY_STS_ARN); - String clientSecret = cloudJob.getProperty(CloudJob.PROPERTY_CLIENT_SECRET); - String s3Role = cloudJob.getProperty(CloudJob.PROPERTY_S3_ROLE); - String nciUser = cloudJob.getProperty(NCIDetails.PROPERTY_NCI_USER); - String nciProj = cloudJob.getProperty(NCIDetails.PROPERTY_NCI_PROJECT); - String nciKey = cloudJob.getProperty(NCIDetails.PROPERTY_NCI_KEY); - - - //The service hangs onto the underlying job Object but the DB is the point of truth - //Make sure we get an updated job object first! - VEGLJob job = jobManager.getJobById(cloudJob.getId(), stsArn, clientSecret, s3Role, cloudJob.getEmailAddress(), nciUser, nciProj, nciKey); - if (job == null) { - return null; - } - - CloudComputeService cloudComputeService = getComputeService(job); - if (cloudComputeService == null) { - if( ! "Saved".equalsIgnoreCase(job.getStatus())) { - log.warn(String.format("No cloud storage service with id '%1$s' for job '%2$s'. 
cannot update job status", job.getComputeServiceId(), job.getId())); - } - return job.getStatus(); - } - - //If we are provisioning BUT the cloudSubmissionService has no record of the provisioning then we may have problems - if (job.getStatus().equals(JobBuilderController.STATUS_PROVISION) && - !cloudSubmissionService.isSubmitting(job, cloudComputeService)) { - - //Just to rule out a possible race condition - get the latest copy of the job from the DB to rule out the possibility - //of a state transition occuring since the last refresh (no state transition can occur once isSubmitting returns false) - job = jobManager.getJobById(cloudJob.getId(), stsArn, clientSecret, s3Role, cloudJob.getEmailAddress(), nciUser, nciProj, nciKey); - if (job.getStatus().equals(JobBuilderController.STATUS_PROVISION)) { - //if after all that we are confident that provisioning has failed AND the status still says provisioning, update to ERROR - return JobBuilderController.STATUS_ERROR; - } - } - - //Some states are terminated states, do absolutely nothing - //Other states are managed by the JobBuilder - if (job.getStatus().equals(JobBuilderController.STATUS_DONE) || - job.getStatus().equals(JobBuilderController.STATUS_UNSUBMITTED) || - job.getStatus().equals(JobBuilderController.STATUS_INQUEUE) || - job.getStatus().equals(JobBuilderController.STATUS_ERROR)|| - job.getStatus().equals(JobBuilderController.STATUS_WALLTIME_EXCEEDED) || - job.getStatus().equals(JobBuilderController.STATUS_PROVISION) || - job.getStatus().equals(JobBuilderController.STATUS_INQUEUE)) { - return job.getStatus(); - } - - //Get the output files for this job - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - log.warn(String.format("No cloud storage service with id '%1$s' for job '%2$s'. cannot update job status", job.getStorageServiceId(), job.getId())); - return job.getStatus(); - } - CloudFileInformation[] results = null; - try { - results = cloudStorageService.listJobFiles(job); - } catch (Exception e) { - log.error("Error listing job files for job " + job.getId() + ":" + e.getMessage()); - log.debug("Exception:", e); - return job.getStatus(); - } - - boolean jobStarted = containsFile(results, "workflow-version.txt"); - boolean jobFinished = containsFile(results, JobListController.VL_TERMINATION_FILE); - // VM side walltime exceeded - boolean jobWalltimeExceeded = containsFile(results, "walltime-exceeded.txt"); - - String expectedStatus = JobBuilderController.STATUS_PENDING; - if (jobFinished) { - expectedStatus = JobBuilderController.STATUS_DONE; - } else if (jobStarted) { - expectedStatus = JobBuilderController.STATUS_ACTIVE; - } else if(jobWalltimeExceeded) { - expectedStatus = JobBuilderController.STATUS_WALLTIME_EXCEEDED; - } - - // If the walltime has exceeded and the VM side walltime check has - // failed to shut the instance down, shut it down - if(jobStarted && !jobFinished && job.isWalltimeSet()) { - if(job.getSubmitDate().getTime() + (job.getWalltime()*60*1000) < new Date().getTime()) { - try { - cloudComputeService.terminateJob(job); - return JobBuilderController.STATUS_WALLTIME_EXCEEDED; - } catch(Exception e) { - log.warn("Exception shutting down terminal: " + job.toString(), e); - return JobBuilderController.STATUS_WALLTIME_EXCEEDED; - } - } - } - - //There is also a possibility that the cloud has had issues booting the VM... 
lets see what we can dig up - try { - switch (cloudComputeService.getJobStatus(job)) { - case Missing: - if (jobFinished) { - return JobBuilderController.STATUS_DONE; - } else if (jobWalltimeExceeded) { - return JobBuilderController.STATUS_WALLTIME_EXCEEDED; - } else { - return JobBuilderController.STATUS_ERROR; - } - case Pending: - case Running: - return expectedStatus; - } - } catch (Exception ex) { - log.warn("Exception looking up job VM status:" + job.toString(), ex); - return job.getStatus(); - } - - return expectedStatus; - } - - private static boolean containsFile(CloudFileInformation[] files, String fileName) { - if (files == null) { - return false; - } - - for (CloudFileInformation file : files) { - if (file.getName().endsWith(fileName) && file.getSize() > 0) { - return true; - } - } - - return false; - } -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/vegl/VLScmSnapshot.java b/src/main/java/org/auscope/portal/server/vegl/VLScmSnapshot.java deleted file mode 100644 index ac6acdda7..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VLScmSnapshot.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; - -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; - -@Entity -//@Table() No table currently - legacy -public class VLScmSnapshot implements Serializable { - - private static final long serialVersionUID = -6638880820028925202L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - private String scmEntryId; - private String computeVmId; - private String computeServiceId; - - protected VLScmSnapshot() { - } - - public VLScmSnapshot(Integer id) { - this.id = id; - } - - public Integer getId() { - return id; - } - - protected void setId(Integer id) { - this.id = id; - } - - public String getScmEntryId() { - return scmEntryId; - } - - public void setScmEntryId(String scmEntryId) { - this.scmEntryId = scmEntryId; - } - - public String getComputeVmId() { - return computeVmId; - } - - public void setComputeVmId(String computeVmId) { - this.computeVmId = computeVmId; - } - - public String getComputeServiceId() { - return computeServiceId; - } - - public void setComputeServiceId(String computeServiceId) { - this.computeServiceId = computeServiceId; - } -} diff --git a/src/main/java/org/auscope/portal/server/vegl/VglDownload.java b/src/main/java/org/auscope/portal/server/vegl/VglDownload.java deleted file mode 100644 index faa35e071..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VglDownload.java +++ /dev/null @@ -1,296 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; - -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.Table; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -/** - * Represents a remote file download step that occurs during a VL job startup - * @author Josh Vote - * - */ -@Entity -@Table(name="downloads") -public class VglDownload implements Serializable, Cloneable { - private static final long serialVersionUID = 5436097345907506395L; - - /** The primary key for this download*/ - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - 
private Integer id; - /** The descriptive name of this download*/ - private String name; - /** The long description for this download*/ - private String description; - /** The actual URL that when accessed with a GET request will download data*/ - private String url; - /** Where the downloaded data (on the job VM) will be downloaded to*/ - private String localPath; - /** If this download is for a spatial region this will represent the most northern bounds of the region in WGS:84*/ - private Double northBoundLatitude; - /** If this download is for a spatial region this will represent the most southern bounds of the region in WGS:84*/ - private Double southBoundLatitude; - /** If this download is for a spatial region this will represent the most eastern bounds of the region in WGS:84*/ - private Double eastBoundLongitude; - /** If this download is for a spatial region this will represent the most western bounds of the region in WGS:84*/ - private Double westBoundLongitude; - /** The job that owns this download*/ - @JsonIgnore - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "jobId") - private VEGLJob parent; - - /* - * Removing these as can't see they're used and not defined in database, - * also affects JobDownloadController, TestJobDownloadController - */ - /** Organisation or person responsible for this data set */ - //private String owner; - /** Url of the data this is a subset of (if applicable) */ - //private String parentUrl; - /** Name of the data this is a subset of (if applicable) */ - //private String parentName; - - - /** - * Default constructor - */ - public VglDownload() { - this(null); - } - - /** - * - * @param id The primary key for this download - */ - public VglDownload(Integer id) { - super(); - this.id = id; - } - - /** - * The primary key for this download - * @return - */ - public Integer getId() { - return id; - } - - /** - * The primary key for this download - * @param id - */ - public void setId(Integer id) { - this.id = id; - } - - /** - * The descriptive name of this download - * @return - */ - public String getName() { - return name; - } - - /** - * The descriptive name of this download - * @param name - */ - public void setName(String name) { - this.name = name; - } - - /** - * The long description for this download - * @return - */ - public String getDescription() { - return description; - } - - /** - * The long description for this download - * @param description - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * The actual URL that when accessed with a GET request will download data - * @return - */ - public String getUrl() { - return url; - } - - /** - * The actual URL that when accessed with a GET request will download data - * @param url - */ - public void setUrl(String url) { - this.url = url; - } - - /** - * Where the downloaded data (on the job VM) will be downloaded to - * @return - */ - public String getLocalPath() { - return localPath; - } - - /** - * Where the downloaded data (on the job VM) will be downloaded to - * @param localPath - */ - public void setLocalPath(String localPath) { - this.localPath = localPath; - } - - /** - * If this download is for a spatial region this will represent the most northern bounds of the region in WGS:84 - * @return - */ - public Double getNorthBoundLatitude() { - return northBoundLatitude; - } - - /** - * If this download is for a spatial region this will represent the most northern bounds of the region in WGS:84 - * @param northBoundLatitude - */ - 
public void setNorthBoundLatitude(Double northBoundLatitude) { - this.northBoundLatitude = northBoundLatitude; - } - - /** - * If this download is for a spatial region this will represent the most southern bounds of the region in WGS:84 - * @return - */ - public Double getSouthBoundLatitude() { - return southBoundLatitude; - } - - /** - * If this download is for a spatial region this will represent the most southern bounds of the region in WGS:84 - * @param southBoundLatitude - */ - public void setSouthBoundLatitude(Double southBoundLatitude) { - this.southBoundLatitude = southBoundLatitude; - } - - /** - * If this download is for a spatial region this will represent the most eastern bounds of the region in WGS:84 - * @return - */ - public Double getEastBoundLongitude() { - return eastBoundLongitude; - } - - /** - * If this download is for a spatial region this will represent the most eastern bounds of the region in WGS:84 - * @param eastBoundLongitude - */ - public void setEastBoundLongitude(Double eastBoundLongitude) { - this.eastBoundLongitude = eastBoundLongitude; - } - - /** - * If this download is for a spatial region this will represent the most western bounds of the region in WGS:84 - * @return - */ - public Double getWestBoundLongitude() { - return westBoundLongitude; - } - - /** - * If this download is for a spatial region this will represent the most western bounds of the region in WGS:84 - * @param westBoundLongitude - */ - public void setWestBoundLongitude(Double westBoundLongitude) { - this.westBoundLongitude = westBoundLongitude; - } - - /* - public String getOwner() { - return owner; - } - - public void setOwner(String owner) { - this.owner = owner; - } - - public String getParentUrl() { - return parentUrl; - } - - public void setParentUrl(String parentUrl) { - this.parentUrl = parentUrl; - } - - public String getParentName() { - return parentName; - } - - public void setParentName(String parentName) { - this.parentName = parentName; - } - */ - - /** - * The job that owns this download - * @return - */ - public VEGLJob getParent() { - return parent; - } - - /** - * The job that owns this download - * @param parent - */ - public void setParent(VEGLJob parent) { - this.parent = parent; - } - - @Override - public Object clone() { - try { - return super.clone(); - } catch (CloneNotSupportedException e) { - return null; - } - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (!(obj instanceof VglDownload)) { - return false; - } - - return this.id.equals(((VglDownload)obj).id); - } - - @Override - public int hashCode() { - return this.id.hashCode(); - } -} diff --git a/src/main/java/org/auscope/portal/server/vegl/VglMachineImage.java b/src/main/java/org/auscope/portal/server/vegl/VglMachineImage.java deleted file mode 100644 index 2efd6fa71..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VglMachineImage.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.auscope.portal.server.vegl; - -import org.auscope.portal.core.cloud.MachineImage; - -/** - * Represents a single virtual machine image that can be used for spawning worker instances. 
- * - * Extends the original concept to include security permissions - * @author Josh Vote - * - */ -public class VglMachineImage extends MachineImage { - - private static final long serialVersionUID = 2136324684785829825L; - - /** List of roles that have been given the permission to use this image */ - private String[] permissions; - - /** - * Creates a new VglMachineImage object - * @param imageId - */ - public VglMachineImage(String imageId) { - super(imageId); - } - - /** - * List of roles that have been given the permission to use this image - * @return - */ - public String[] getPermissions() { - return permissions; - } - - /** - * List of roles that have been given the permission to use this image - * @param permissions - */ - public void setPermissions(String[] permissions) { - this.permissions = permissions; - } -} diff --git a/src/main/java/org/auscope/portal/server/vegl/VglParameter.java b/src/main/java/org/auscope/portal/server/vegl/VglParameter.java deleted file mode 100644 index 5bd55bfd7..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/VglParameter.java +++ /dev/null @@ -1,204 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.Serializable; - -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.Table; - -import com.fasterxml.jackson.annotation.JsonIgnore; - - - -/** - * A Job Parameter is a single 'typed' (very loosely) key/value pairing. - * - * A typical Job will have one or more parameter values created as the job is constructed. The parameter - * set is made available to any job scripts that get run - * - * @author Josh Vote - * - */ -@Entity -@Table(name="parameters") -public class VglParameter implements Serializable, Cloneable { - - private static final long serialVersionUID = -7474027234400180238L; - - /** - * The different types of parameter types - * @author Josh Vote - */ - public enum ParameterType { - string, - number - } - - /** The primary key for this parameter*/ - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - /** The name of this parameter*/ - private String name; - - /** The value (as a string) of this parameter*/ - private String value; - - /** The 'type' of this parameter. 
Can be 'number' or 'string'*/ - private String type; - /** The job that owns this parameter*/ - - @JsonIgnore - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "jobId") - private VEGLJob parent; - - - /** - * Default constructor - */ - public VglParameter() { - this(null, null); - } - - /** - * Default constructor - */ - public VglParameter(Integer id, String name) { - this(id, name, null, null); - } - - /** - * Construct a fully populated instance - */ - public VglParameter(Integer id, String name, String value, String type) { - this(id, name, value, type, null); - } - - /** - * Construct a fully populated instance - */ - public VglParameter(Integer id, String name, String value, String type, VEGLJob parent) { - super(); - this.id = id; - this.name = name; - this.type = type; - this.value = value; - this.parent = parent; - } - - /** - * The primary key for this parameter - * @return - */ - public Integer getId() { - return id; - } - - /** - * The primary key for this parameter - * @param id - */ - public void setId(Integer id) { - this.id = id; - } - - /** - * The name of this parameter - * @return - */ - public String getName() { - return name; - } - - /** - * The name of this parameter - * @param name - */ - public void setName(String name) { - this.name = name; - } - - /** - * The value (as a string) of this parameter - * @return - */ - public String getValue() { - return value; - } - - /** - * The value (as a string) of this parameter - * @param value - */ - public void setValue(String value) { - this.value = value; - } - - /** - * The 'type' of this parameter. Can be 'number' or 'string' - * @return - */ - public String getType() { - return type; - } - - /** - * The 'type' of this parameter. Can be 'number' or 'string' - * @param type - */ - public void setType(String type) { - this.type = type; - } - - /** - * The job that owns this parameter - * @return - */ - public VEGLJob getParent() { - return parent; - } - - /** - * The job that owns this parameter - * @param parent - */ - public void setParent(VEGLJob parent) { - this.parent = parent; - } - - /** - * Tests two VglJobParameter objects for equality based on job id and name - */ - @Override - public boolean equals(Object o) { - if (o instanceof VglParameter) { - return this.parent.getId().equals(((VglParameter) o).parent.getId()) && this.name.equals(((VglParameter) o).name); - } - - return false; - } - - /** - * Gets a hashcode based of the job id and name parameters; - */ - @Override - public int hashCode() { - return name.hashCode() ^ parent.getId().hashCode(); - } - - @Override - public Object clone() { - try { - return super.clone(); - } catch (CloneNotSupportedException e) { - return null; - } - } -} diff --git a/src/main/java/org/auscope/portal/server/vegl/mail/JobCompletionMailSender.java b/src/main/java/org/auscope/portal/server/vegl/mail/JobCompletionMailSender.java deleted file mode 100644 index 022e8fb93..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/mail/JobCompletionMailSender.java +++ /dev/null @@ -1,293 +0,0 @@ -package org.auscope.portal.server.vegl.mail; - -import java.io.StringWriter; -import java.util.Date; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.velocity.VelocityContext; -import org.apache.velocity.app.VelocityEngine; -import org.auscope.portal.core.util.DateUtil; -import org.auscope.portal.core.util.TextUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import 
org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.mail.MailSender; -import org.springframework.mail.SimpleMailMessage; - -/** - * A concrete implementation of JobMailServer interface - * that responsible for constructing and sending out - * job completion notification. - * - * @author Richard Goh - */ -public class JobCompletionMailSender implements JobMailSender { - private final Log LOG = LogFactory.getLog(getClass()); - - //Properties that get injected thru constructor - private VEGLJobManager jobManager; - private VGLJobStatusAndLogReader jobStatLogReader; - private MailSender mailSender; - private VelocityEngine velocityEngine; - - //Properties that get injected thru setter methods - private String template; - private String dateFormat; - private int maxLengthForSeriesNameInSubject; - private int maxLengthForJobNameInSubject; - private int maxLinesForTail; - private String emailSender; - private String emailSubject; - - private String portalUrl=null; - - /** - * @return the portalUrl - */ - public String getPortalUrl() { - return portalUrl; - } - - /** - * @param portalUrl the portalUrl to set - */ - public void setPortalUrl(String portalUrl) { - this.portalUrl = portalUrl; - } - - @Autowired - public JobCompletionMailSender(VEGLJobManager jobManager, - VGLJobStatusAndLogReader jobStatLogReader, MailSender mailSender, - VelocityEngine velocityEngine) { - this.jobManager = jobManager; - this.jobStatLogReader = jobStatLogReader; - this.mailSender = mailSender; - this.velocityEngine = velocityEngine; - } - - /** - * Sets the job completion notification template file including - * its location on the class path. - * - * @param template - */ - public void setTemplate(String template) { - this.template = template; - } - - /** - * Gets the job completion notification template file path. - * - * @return - */ - public String getTemplate() { - return template; - } - - /** - * Sets the date format to be used in the email text. - * - * @param dateFormat The pattern describing the date and time format. - */ - public void setDateFormat(String dateFormat) { - this.dateFormat = dateFormat; - } - - /** - * Gets the date format used in the email text. - * - * @return the pattern describing the date and time format. - */ - public String getDateFormat() { - return dateFormat; - } - - /** - * Sets the maximum length for series name to be - * displayed in email subject. - * - * @param maxLengthForSeriesNameInSubject - */ - public void setMaxLengthForSeriesNameInSubject(int maxLengthForSeriesNameInSubject) { - this.maxLengthForSeriesNameInSubject = maxLengthForSeriesNameInSubject; - } - - /** - * Gets the maximum length of series name in the - * email subject. - * - * @return - */ - public int getMaxLengthForSeriesNameInSubject() { - return maxLengthForSeriesNameInSubject; - } - - /** - * Sets the maximum length for job name to be - * displayed on email subject. - * - * @param maxLengthForJobNameInSubject - */ - public void setMaxLengthForJobNameInSubject(int maxLengthForJobNameInSubject) { - this.maxLengthForJobNameInSubject = maxLengthForJobNameInSubject; - } - - /** - * Gets the maximum length of job name in the - * email subject. 
- * - * @return - */ - public int getMaxLengthForJobNameInSubject() { - return maxLengthForJobNameInSubject; - } - - /** - * Sets the maximum number of lines (N - counting from - * the bottom) to be used for getting the last N lines - * of text from the job execution log. - * - * @param maxLinesForTail - */ - public void setMaxLinesForTail(int maxLinesForTail) { - this.maxLinesForTail = maxLinesForTail; - } - - /** - * Get the maximum number of lines for tailing the - * job execution log. - * - * @return - */ - public int getMaxLinesForTail() { - return maxLinesForTail; - } - - /** - * Sets the email sender. - * - * @param emailSender - */ - public void setEmailSender(String emailSender) { - this.emailSender = emailSender; - } - - /** - * Gets the email sender. - * - * @return - */ - public String getEmailSender() { - return emailSender; - } - - /** - * Sets the email subject. - * - * @param emailSubject - */ - public void setEmailSubject(String emailSubject) { - this.emailSubject = emailSubject; - } - - /** - * Get the email subject. - * - * @return - */ - public String getEmailSubject() { - return emailSubject; - } - - /** - * Constructs job completion notification email content. - * @param seriesName - */ - @Override - public String constructMailContent(String seriesName, VEGLJob job) { - - Date submitDate, processDate, executeDate; - if(job.getSubmitDate()!=null){ - submitDate=job.getSubmitDate(); - }else{ - submitDate=new Date(); - } - - if(job.getProcessDate()!=null){ - processDate=job.getProcessDate(); - }else{ - processDate=new Date(); - } - - // If execution date failed to set revert to submission date - if(job.getExecuteDate()!=null){ - executeDate=job.getExecuteDate(); - }else{ - executeDate= job.getSubmitDate(); - } - - long[] diff = DateUtil.getTimeDifference(executeDate, processDate); - String timeElapsed = diff[0] + " day(s) " + diff[1] + " hour(s) " - + diff[2] + " minute(s) " + diff[3] + " second(s)"; - - VelocityContext velocityContext = new VelocityContext(); - velocityContext.put("userName", job.getUser().substring(0,job.getUser().indexOf("@"))); - velocityContext.put("status", job.getStatus()); - velocityContext.put("jobId", job.getId().toString()); - velocityContext.put("seriesName", seriesName); - velocityContext.put("jobName", job.getName()); - velocityContext.put("jobDescription", job.getDescription()); - velocityContext.put("dateSubmitted", DateUtil.formatDate(submitDate, dateFormat)); - velocityContext.put("dateExecuted", DateUtil.formatDate(executeDate, dateFormat)); - velocityContext.put("dateProcessed", DateUtil.formatDate(processDate, dateFormat)); - velocityContext.put("timeElapsed", timeElapsed); - velocityContext.put("jobExecLogSnippet", TextUtil.tail(jobStatLogReader.getSectionedLog(job, "Python"), maxLinesForTail)); - velocityContext.put("emailSender", getEmailSender()); - velocityContext.put("portalUrl", getPortalUrl()); - StringWriter stringWriter = new StringWriter(); - velocityEngine.mergeTemplate(template, "UTF-8", velocityContext, stringWriter); - return stringWriter.toString(); - } - - /** - * Sends job completion notification email with Spring - * framework's MailSender. 
- */ - @Override - public void sendMail(VEGLJob job) { - String jobName = job.getName(); - String seriesName = ""; - - if (job.getSeriesId() != null && job.getSeriesId() != 0) { - VEGLSeries jobSeries = jobManager.getSeriesById(job.getSeriesId(), job.getEmailAddress()); - if (jobSeries != null) { - seriesName = jobSeries.getName(); - - if (seriesName.length() > maxLengthForSeriesNameInSubject) { - seriesName = seriesName.substring(0, maxLengthForJobNameInSubject); - } - } - } - - if (jobName.length() > maxLengthForJobNameInSubject) { - jobName = jobName.substring(0, maxLengthForJobNameInSubject); - } - - String subject = String.format(this.emailSubject, jobName); - - SimpleMailMessage msg = new SimpleMailMessage(); - msg.setFrom(this.emailSender); - msg.setTo(job.getEmailAddress()); - msg.setSubject(subject); - msg.setText(constructMailContent(seriesName, job)); - - try { - this.mailSender.send(msg); - } catch (Exception ex) { - LOG.error("Sending of email notification failed for job id [" + job.getId() + "].", ex); - } - } -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/vegl/mail/JobMailSender.java b/src/main/java/org/auscope/portal/server/vegl/mail/JobMailSender.java deleted file mode 100644 index 7901845f6..000000000 --- a/src/main/java/org/auscope/portal/server/vegl/mail/JobMailSender.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.auscope.portal.server.vegl.mail; - -import org.auscope.portal.server.vegl.VEGLJob; - -/** - * An interface with common methods for any beans that - * need to implement send mail functionality. - * - * @author Richard Goh - */ -public interface JobMailSender { - /** - * Constructs job notification email content. - * - * @param seriesName The series name - * @param job The VEGLJob object - * @return - */ - public String constructMailContent(String seriesName, VEGLJob job); - - /** - * Sends email with SMTP protocol. 
- * - * @param job - */ - public void sendMail(VEGLJob job); - -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/web/controllers/BaseCloudController.java b/src/main/java/org/auscope/portal/server/web/controllers/BaseCloudController.java deleted file mode 100644 index d30ce967a..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/BaseCloudController.java +++ /dev/null @@ -1,234 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.services.cloud.CloudStorageServiceJClouds; -import org.auscope.portal.core.util.TextUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.springframework.beans.factory.annotation.Value; - -/** - * Methods and variables common to any controller wishing to access - * the cloud - * - * @author Josh Vote - * - */ -public abstract class BaseCloudController extends BaseModelController { - /** All cloud storage services that are available to this controller */ - protected CloudStorageService[] cloudStorageServices; - /** All cloud compute services that are available to this controller */ - protected CloudComputeService[] cloudComputeServices; - private String vmSh, vmShutdownSh; - - - /** - * @param cloudStorageServices All cloud storage services that are available to this controller - * @param cloudComputeServices All cloud compute services that are available to this controller - */ - public BaseCloudController(CloudStorageService[] cloudStorageServices, CloudComputeService[] cloudComputeServices, VEGLJobManager jobManager) { - this(cloudStorageServices,cloudComputeServices, jobManager,null,null); - } - - public BaseCloudController(CloudStorageService[] cloudStorageServices, CloudComputeService[] cloudComputeServices, VEGLJobManager jobManager, - @Value("${cloud.vm.sh}") String vmSh, @Value("${cloud.vm-shutdown.sh}") String vmShutdownSh) { - super(jobManager); - this.cloudComputeServices = cloudComputeServices; - this.cloudStorageServices = cloudStorageServices; - this.vmSh=vmSh; - this.vmShutdownSh=vmShutdownSh; - } - - - /** - * Lookup a cloud storage service by ID. Returns null if the service DNE - * @param id The name of the service to lookup - * @return - */ - protected CloudStorageService getStorageService(String id) { - for (CloudStorageService s : cloudStorageServices) { - if (s.getId().equals(id)) { - return s; - } - } - - log.warn(String.format("CloudStorageService with ID '%1$s' doesn't exist", id)); - return null; - } - - /** - * Lookup a cloud storage service by ID (as configured in job). 
Returns null if the service DNE - * @param id The job whose storage service id will be used in the lookup - * @return - */ - protected CloudStorageService getStorageService(VEGLJob job) { - return getStorageService(job.getStorageServiceId()); - } - - /** - * Lookup a cloud compute service by ID. Returns null if the service DNE - * @param id The name of the service to lookup - * @return - */ - protected CloudComputeService getComputeService(String id) { - if(TextUtil.isNullOrEmpty(id)) - return null; - - for (CloudComputeService s : cloudComputeServices) { - if (s.getId().equals(id)) { - return s; - } - } - - log.warn(String.format("CloudComputeService with ID '%1$s' doesn't exist", id)); - return null; - } - - /** - * Gets the subset of cloudComputeServices that the specified user has successfully configured in their setup page. - * @param user - * @param nciDetailsService - * @return - * @throws PortalServiceException - */ - protected List getConfiguredComputeServices(PortalUser user, NCIDetailsService nciDetailsService) throws PortalServiceException { - return getConfiguredComputeServices(user, nciDetailsService, cloudComputeServices); - } - - /** - * Gets the subset of cloudComputeServices that the specified user has successfully configured in their setup page. - * @param user - * @param nciDetailsService - * @return - * @throws PortalServiceException - */ - public static List getConfiguredComputeServices(PortalUser user, NCIDetailsService nciDetailsService, CloudComputeService[] cloudComputeServices) throws PortalServiceException { - List configuredServices = new ArrayList(cloudComputeServices.length); - for (CloudComputeService ccs : cloudComputeServices) { - - switch(ccs.getId()) { - case "aws-ec2-compute": - if (StringUtils.isNotEmpty(user.getArnExecution()) && StringUtils.isNotEmpty(user.getArnStorage())) { - configuredServices.add(ccs); - } - break; - case "nci-gadi-compute": - NCIDetails details = nciDetailsService.getByUser(user); - if (details != null && StringUtils.isNotEmpty(details.getKey())) { - configuredServices.add(ccs); - } - break; - case "nectar-nova-compute": - if (user.getId().contains("@")) { //HACK - this is assuming that AAF ID's will be an email and contain an '@' where google OAuth will not. - configuredServices.add(ccs); - } - break; - default: - configuredServices.add(ccs); - break; - - } - } - - return configuredServices; - } - - - /** - * Lookup a cloud compute service by ID (as configured in job). Returns null if the service DNE - * @param id The job whose compute service id will be used in the lookup - * @return - */ - protected CloudComputeService getComputeService(VEGLJob job) { - return getComputeService(job.getComputeServiceId()); - } - - /** - * Loads the bootstrap shell script template as a string. - * @return - * @throws IOException - */ - private String getBootstrapTemplate() throws IOException { - try (InputStream is = this.getClass().getResourceAsStream("vl-bootstrap.sh")) { - String template = IOUtils.toString(is, StandardCharsets.UTF_8); - return template.replaceAll("\r", ""); // Windows style file endings - // have a tendency to sneak in - // via StringWriter and the - // like - } - } - - /** - * Return the provisioning template as a string. 
- * - * @return String template - * @throws IOException if fails to load template resource - */ - private String getProvisioningTemplate() throws IOException { - try (InputStream is = getClass().getResourceAsStream("vl-provisioning.sh")) { - String template = IOUtils.toString(is, StandardCharsets.UTF_8); - return template.replaceAll("\r", ""); // Windows style file endings - // have a tendency to sneak in - // via StringWriter and the - // like - } - } - - /** - * Creates a bootstrap shellscript for job that will be sent to - * cloud VM instance to kick start the work for job. - * @param job - * @return - * @throws IOException - */ - public String createBootstrapForJob(VEGLJob job) throws IOException { - String bootstrapTemplate = getBootstrapTemplate(); - CloudStorageService cloudStorageService = getStorageService(job); - - boolean useSts = false; - if (cloudStorageService instanceof CloudStorageServiceJClouds) { - switch(((CloudStorageServiceJClouds)cloudStorageService).getStsRequirement()) { - case ForceNone: - useSts = false; - break; - case Mandatory: - useSts = true; - case Permissable: - useSts = !TextUtil.isNullOrEmpty(job.getProperty(CloudJob.PROPERTY_STS_ARN)); - } - } - - Object[] arguments = new Object[] { - job.getStorageBucket(), // STORAGE_BUCKET - job.getStorageBaseKey().replace("//", "/"), // STORAGE_BASE_KEY_PATH - useSts ? "" : cloudStorageService.getAccessKey(), // STORAGE_ACCESS_KEY - useSts ? "" : cloudStorageService.getSecretKey(), // STORAGE_SECRET_KEY - vmSh, // WORKFLOW_URL - cloudStorageService.getEndpoint(), // STORAGE_ENDPOINT - cloudStorageService.getProvider(), // STORAGE_TYPE - cloudStorageService.getAuthVersion() == null ? "" : cloudStorageService.getAuthVersion(), // STORAGE_AUTH_VERSION - cloudStorageService.getRegionName() == null ? "" : cloudStorageService.getRegionName(), // OS_REGION_NAME - getProvisioningTemplate(), // PROVISIONING_TEMPLATE - vmShutdownSh, // WORKFLOW_URL - job.isWalltimeSet() ? job.getWalltime() : 0 // WALLTIME - }; - - String result = MessageFormat.format(bootstrapTemplate, arguments); - return result; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/BaseModelController.java b/src/main/java/org/auscope/portal/server/web/controllers/BaseModelController.java deleted file mode 100644 index 9f60a886a..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/BaseModelController.java +++ /dev/null @@ -1,109 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import org.auscope.portal.core.server.controllers.BasePortalController; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.web.security.PortalUser; -import org.springframework.security.access.AccessDeniedException; - -/** - * Base Controller class for controllers wishing to access and modify the underlying - * model (database) for a virtual lab. - * @author Josh Vote (CSIRO) - * - */ -public class BaseModelController extends BasePortalController { - - protected VEGLJobManager jobManager; - - protected BaseModelController(VEGLJobManager jobManager) { - this.jobManager = jobManager; - } - - /** - * Attempts to get a job with a particular ID. If the job ID does NOT belong to the current - * user session null will be returned. - * - * This function will log all appropriate errors. - * @param jobId - * @return The VEGLJob object on success or null otherwise. 
- */ - protected VEGLJob attemptGetJob(Integer jobId, PortalUser user) { - log.trace("Getting job with ID " + jobId); - VEGLJob job = null; - - //Check we have a user email - if (user == null || user.getEmail() == null) { - log.warn("The current session is missing an email attribute"); - return null; - } - - //Attempt to fetch our job - if (jobId != null) { - try { - job = jobManager.getJobById(jobId.intValue(), user); - log.debug("Job [" + job.hashCode() + "] retrieved by jobManager [" + jobManager.hashCode() + "]"); - } catch (AccessDeniedException e) { - throw e; - } catch (Exception ex) { - log.error(String.format("Exception when accessing jobManager for job id '%1$s'", jobId), ex); - return null; - } - } - - if (job == null) { - log.warn(String.format("Job with ID '%1$s' does not exist", jobId)); - return null; - } - - //Check user matches job - if (!user.getEmail().equals(job.getUser())) { - log.warn(String.format("%1$s's attempt to fetch %2$s's job denied!", user, job.getUser())); - throw new AccessDeniedException(String.format("%1$s doesn't have permission to access job %2$s", user, jobId)); - } - - return job; - } - - /** - * Attempts to get a series with a particular ID. If the series ID does NOT belong to the current - * user session null will be returned. - * - * This function will log all appropriate errors. - * @param jobId - * @return The VEGLSeries object on success or null otherwise. - */ - protected VEGLSeries attemptGetSeries(Integer seriesId, PortalUser user) { - VEGLSeries series = null; - - //Check we have a user email - if (user == null || user.getEmail() == null) { - log.warn("The current session is missing an email attribute"); - return null; - } - - //Attempt to fetch our job - if (seriesId != null) { - try { - series = jobManager.getSeriesById(seriesId.intValue(), user.getEmail()); - } catch (Exception ex) { - log.error(String.format("Exception when accessing jobManager for series id '%1$s'", seriesId), ex); - return null; - } - } - - if (series == null) { - log.warn(String.format("Series with ID '%1$s' does not exist", seriesId)); - return null; - } - - //Check user matches job - if (!user.getEmail().equals(series.getUser())) { - log.warn(String.format("%1$s's attempt to fetch %2$s's job denied!", user, series.getUser())); - throw new AccessDeniedException(String.format("%1$s doesn't have permission to access series %2$s", user, seriesId)); - } - - return series; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/JobBuilderController.java b/src/main/java/org/auscope/portal/server/web/controllers/JobBuilderController.java deleted file mode 100644 index f50c4338c..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/JobBuilderController.java +++ /dev/null @@ -1,1351 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.io.File; -import java.io.IOException; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; -import jakarta.servlet.http.HttpSession; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.cloud.ComputeType; -import org.auscope.portal.core.cloud.MachineImage; -import org.auscope.portal.core.cloud.StagedFile; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudComputeServiceAws; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.services.cloud.CloudStorageServiceJClouds; -import org.auscope.portal.core.services.cloud.FileStagingService; -import org.auscope.portal.core.services.cloud.STSRequirement; -import org.auscope.portal.core.util.TextUtil; -import org.auscope.portal.server.gridjob.FileInformation; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.vegl.VglMachineImage; -import org.auscope.portal.server.vegl.VglParameter; -import org.auscope.portal.server.vegl.VglParameter.ParameterType; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.ANVGLProvenanceService; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.CloudSubmissionService; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.auscope.portal.server.web.service.ScmEntryService; -import org.auscope.portal.server.web.service.cloud.CloudComputeServiceNci; -import org.auscope.portal.server.web.service.monitor.VGLJobStatusChangeHandler; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.beans.propertyeditors.CustomDateEditor; -import org.springframework.http.HttpStatus; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.stereotype.Controller; -import org.springframework.ui.ModelMap; -import org.springframework.web.bind.WebDataBinder; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.InitBinder; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.multipart.MultipartHttpServletRequest; -import org.springframework.web.servlet.ModelAndView; - -/** - * Controller for the job submission view. 
- * - * @author Cihan Altinay - * @author Abdi Jama - * @author Josh Vote - */ -@Controller -public class JobBuilderController extends BaseCloudController { - - /** Logger for this class */ - private final Log logger = LogFactory.getLog(getClass()); - - private FileStagingService fileStagingService; - private ScmEntryService scmEntryService; - private ANVGLProvenanceService anvglProvenanceService; - private String adminEmail = null; - private String defaultToolbox = null; - private CloudSubmissionService cloudSubmissionService; - private PortalUserService userService; - - @Autowired - private NCIDetailsService nciDetailsService; - - /** - * @return the adminEmail - */ - public String getAdminEmail() { - return adminEmail; - } - - - /** - * @param adminEmail the adminEmail to set - */ - public void setAdminEmail(String adminEmail) { - this.adminEmail = adminEmail; - } - - public static final String STATUS_PENDING = "Pending";//VT:Request accepted by compute service - public static final String STATUS_ACTIVE = "Active";//VT:Running - public static final String STATUS_PROVISION = "Provisioning";//VT:awaiting response from compute service - public static final String STATUS_DONE = "Done";//VT:Job done - public static final String STATUS_DELETED = "Deleted";//VT:Job deleted - public static final String STATUS_UNSUBMITTED = "Saved";//VT:Job saved, fail to submit for whatever reason. - public static final String STATUS_INQUEUE = "In Queue";//VT: quota exceeded, placed in queue. - public static final String STATUS_ERROR = "ERROR";//VT:Exception in job processing. - public static final String STATUS_WALLTIME_EXCEEDED = "WALLTIME EXCEEDED";//VT:Walltime exceeded. - - public static final String SUBMIT_DATE_FORMAT_STRING = "yyyyMMdd_HHmmss"; - - public static final String DOWNLOAD_SCRIPT = "vl-download.sh"; - VGLJobStatusChangeHandler vglJobStatusChangeHandler; - - @Autowired - public JobBuilderController(@Value("${portalAdminEmail}") String adminEmail, - @Value("${cloud.sssc.defaultToolbox}") String defaultToolbox, - VEGLJobManager jobManager, FileStagingService fileStagingService, - @Value("${cloud.vm.sh}") String vmSh, @Value("${cloud.vm-shutdown.sh}") String vmShutdownSh, - CloudStorageService[] cloudStorageServices, - CloudComputeService[] cloudComputeServices, - PortalUserService userService, - VGLJobStatusChangeHandler vglJobStatusChangeHandler, - ScmEntryService scmEntryService, ANVGLProvenanceService anvglProvenanceService, - CloudSubmissionService cloudSubmissionService) { - super(cloudStorageServices, cloudComputeServices, jobManager,vmSh,vmShutdownSh); - this.fileStagingService = fileStagingService; - this.cloudStorageServices = cloudStorageServices; - this.cloudComputeServices = cloudComputeServices; - this.userService = userService; - this.vglJobStatusChangeHandler=vglJobStatusChangeHandler; - this.scmEntryService = scmEntryService; - this.anvglProvenanceService = anvglProvenanceService; - this.adminEmail=adminEmail; - this.defaultToolbox = defaultToolbox; - this.cloudSubmissionService = cloudSubmissionService; - } - - - /** - * Returns a JSON object containing a populated VEGLJob object. - * - * @param request The servlet request - * @param response The servlet response - * - * @return A JSON object with a data attribute containing a populated - * VEGLJob object and a success attribute. 
- */ - @RequestMapping("/secure/getJobObject.do") - public ModelAndView getJobObject(@RequestParam("jobId") String jobId) { - PortalUser user = userService.getLoggedInUser(); - try { - VEGLJob job = attemptGetJob(Integer.parseInt(jobId), user); - if (job == null) { - return generateJSONResponseMAV(false); - } - - return generateJSONResponseMAV(true, Arrays.asList(job), ""); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - } - - /** - * Utility for converting between a StagedFile and FileInformation object - * @param file - * @return - */ - private static FileInformation stagedFileToFileInformation(StagedFile file) { - File internalFile = file.getFile(); - long length = internalFile == null ? 0 : internalFile.length(); - return new FileInformation(file.getName(), length, false, ""); - } - - /** - * Returns a JSON object containing an array of filenames and sizes which - * are currently in the job's stage in directory. - * - * @param request The servlet request - * @param response The servlet response - * - * @return A JSON object with a files attribute which is an array of - * filenames. - */ - @RequestMapping("/secure/stagedJobFiles.do") - public ModelAndView stagedJobFiles(@RequestParam("jobId") String jobId) { - PortalUser user = userService.getLoggedInUser(); - //Lookup our job - VEGLJob job = null; - try { - job = attemptGetJob(Integer.parseInt(jobId), user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - //Get our files - StagedFile[] files = null; - try { - files = fileStagingService.listStageInDirectoryFiles(job); - } catch (Exception ex) { - logger.error("Error listing job stage in directory", ex); - return generateJSONResponseMAV(false, null, "Error reading job stage in directory"); - } - List fileInfos = new ArrayList<>(); - for (StagedFile file : files) { - fileInfos.add(stagedFileToFileInformation(file)); - } - - return generateJSONResponseMAV(true, fileInfos, ""); - } - - /** - * Sends the contents of a input job file to the client. - * - * @param request The servlet request including a filename parameter - * - * @param response The servlet response receiving the data - * - * @return null on success or the joblist view with an error parameter on - * failure. - * @throws IOException - */ - @RequestMapping("/secure/downloadInputFile.do") - public ModelAndView downloadFile(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") String jobId, - @RequestParam("filename") String filename) throws Exception { - PortalUser user = userService.getLoggedInUser(); - //Lookup our job and download the specified files (any exceptions will return a HTTP 503) - VEGLJob job = attemptGetJob(Integer.parseInt(jobId), user); - if (job == null) { - response.sendError(HttpStatus.INTERNAL_SERVER_ERROR.value(), "Couldnt access job with that ID"); - return null; - } - fileStagingService.handleFileDownload(job, filename, response); - return null; - } - - /** - * Processes a file upload request returning a JSON object which indicates - * whether the upload was successful and contains the filename and file - * size. 
- * - * @param request The servlet request - * @param response The servlet response containing the JSON data - * - * @return null - */ - @RequestMapping("/secure/uploadFile.do") - public ModelAndView uploadFile(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") String jobId) { - PortalUser user = userService.getLoggedInUser(); - //Lookup our job - VEGLJob job = null; - try { - job = attemptGetJob(Integer.parseInt(jobId), user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateHTMLResponseMAV(false, null, ""); - } - - //Handle incoming file - StagedFile file = null; - try { - file = fileStagingService.handleFileUpload(job, (MultipartHttpServletRequest) request); - } catch (Exception ex) { - logger.error("Error uploading file", ex); - return generateJSONResponseMAV(false, null, "Error uploading file"); - } - FileInformation fileInfo = stagedFileToFileInformation(file); - - //We have to use a HTML response due to ExtJS's use of a hidden iframe for file uploads - //Failure to do this will result in the upload working BUT the user will also get prompted - //for a file download containing the encoded response from this function (which we don't want). - return generateHTMLResponseMAV(true, Arrays.asList(fileInfo), ""); - } - - /** - * Deletes one or more uploaded files of the current job. - * - * @param request The servlet request - * @param response The servlet response - * - * @return A JSON object with a success attribute that indicates whether - * the files were successfully deleted. - */ - @RequestMapping("/secure/deleteFiles.do") - public ModelAndView deleteFiles(@RequestParam("jobId") String jobId, - @RequestParam("fileName") String[] fileNames) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = null; - try { - job = attemptGetJob(Integer.parseInt(jobId), user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - for (String fileName : fileNames) { - boolean success = fileStagingService.deleteStageInFile(job, fileName); - logger.debug("Deleting " + fileName + " success=" + success); - } - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Deletes one or more job downloads for the current job. - * - * @param request The servlet request - * @param response The servlet response - * - * @return A JSON object with a success attribute that indicates whether - * the downloads were successfully deleted. 
- */ - @RequestMapping("/secure/deleteDownloads.do") - public ModelAndView deleteDownloads(@RequestParam("jobId") String jobId, - @RequestParam("downloadId") Integer[] downloadIds) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = null; - try { - job = attemptGetJob(Integer.parseInt(jobId), user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - //Delete the specified ID's - Iterator dlIterator = job.getJobDownloads().iterator(); - while (dlIterator.hasNext()) { - VglDownload dl = dlIterator.next(); - for (Integer id : downloadIds) { - if (id.equals(dl.getId())) { - dlIterator.remove(); - break; - } - } - } - - try { - jobManager.saveJob(job); - } catch (Exception ex) { - logger.error("Error saving job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error saving job with id " + jobId); - } - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Get status of the current job submission. - * - * @param request The servlet request - * @param response The servlet response - * - * @return A JSON object with a success attribute that indicates the status. - * - */ - @RequestMapping("/secure/getJobStatus.do") - public ModelAndView getJobStatus(@RequestParam("jobId") String jobId) { - PortalUser user = userService.getLoggedInUser(); - //Get our job - VEGLJob job = null; - try { - job = attemptGetJob(Integer.parseInt(jobId), user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - return generateJSONResponseMAV(true, job.getStatus(), ""); - } - - /** - * Cancels the current job submission. Called to clean up temporary files. - * - * @param request The servlet request - * @param response The servlet response - * - * @return null - */ - @RequestMapping("/secure/cancelSubmission.do") - public ModelAndView cancelSubmission(@RequestParam("jobId") String jobId) { - PortalUser user = userService.getLoggedInUser(); - //Get our job - VEGLJob job = null; - try { - job = attemptGetJob(Integer.parseInt(jobId), user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - boolean success = fileStagingService.deleteStageInDirectory(job); - return generateJSONResponseMAV(success, null, ""); - } - - /** - * This is for converting our String dates (frontend) to actual data objects (backend). - * - * Date format will match CloudJob.DATE_FORMAT, null/empty strings will be bound as NULL - * @param binder - */ - @InitBinder - public void initBinder(WebDataBinder binder) { - CustomDateEditor editor = new CustomDateEditor(new SimpleDateFormat(CloudJob.DATE_FORMAT), true); - binder.registerCustomEditor(Date.class, editor); - } - - - /** - * Given an entire job object this function attempts to save the specified job with ID - * to the internal database. If the Job DNE (or id is null), the job will be created and - * have it's staging area initialised and other creation specific tasks performed. 
- * - * @return A JSON object with a success attribute that indicates whether - * the job was successfully updated. The data object will contain the updated job - * @return - * @throws ParseException - */ - @RequestMapping("/secure/updateOrCreateJob.do") - public ModelAndView updateOrCreateJob(@RequestParam(value="id", required=false) Integer id, //The integer ID if not specified will trigger job creation - @RequestParam(value="name", required=false) String name, - @RequestParam(value="description", required=false) String description, - @RequestParam(value="seriesId", required=false) Integer seriesId, - @RequestParam(value="computeServiceId", required=false) String computeServiceId, - @RequestParam(value="computeVmId", required=false) String computeVmId, - @RequestParam(value="computeVmRunCommand", required=false) String computeVmRunCommand, - @RequestParam(value="computeTypeId", required=false) String computeTypeId, - @RequestParam(value="ncpus", required=false) Integer ncpus, - @RequestParam(value="jobfs", required=false) Integer jobFs, - @RequestParam(value="mem", required=false) Integer mem, - @RequestParam(value="registeredUrl", required=false) String registeredUrl, - @RequestParam(value="emailNotification", required=false) boolean emailNotification, - @RequestParam(value="walltime", required=false) Integer walltime, - @RequestParam(value="annotations", required=false) String[] annotations, - HttpServletRequest request) { - PortalUser user = userService.getLoggedInUser(); - //Get our job - VEGLJob job = null; - try { - //If we have an ID - look up the job, otherwise create a job - if (id == null) { - //Job creation involves a fair bit of initialisation on the server - job = initialiseVEGLJob(request.getSession(), user); - } else { - job = attemptGetJob(id, user); - } - } catch (AccessDeniedException e) { - throw e; - } catch (Exception ex) { - logger.error(String.format("Error creating/fetching job with id %1$s", id), ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + id); - } - - if (job == null) { - logger.error(String.format("Error creating/fetching job with id %1$s", id)); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + id); - } - - //JSON encoding of series ID can sometimes turn a null into a 0. We will also never have a seriesId of 0 - if (seriesId != null && seriesId == 0) { - seriesId = null; - } - - //Update our job from the request parameters - job.setSeriesId(seriesId); - job.setName(name); - job.setDescription(description); - job.setComputeVmId(computeVmId); - job.setComputeVmRunCommand(computeVmRunCommand); - job.setEmailNotification(emailNotification); - job.setWalltime(walltime); - job.setAnnotations((annotations != null) ? 
Arrays.asList(annotations) : new ArrayList()); - - //HPC doesn't support compute types - for this case we munge our parameters into a the compute instance type string - if (StringUtils.isEmpty(computeTypeId)) { - job.setComputeInstanceType(String.format("ncpus=%1$d&jobfs=%2$dgb&mem=%3$dgb", ncpus, jobFs, mem)); - } else { - job.setComputeInstanceType(computeTypeId); - } - - // Don't allow the user to specify a cloud compute service that DNE - // Updating the compute service means updating the dev keypair - // We also auto choose storage service based on compute service selection - if (computeServiceId != null) { - CloudComputeService ccs = getComputeService(computeServiceId); - if (ccs == null) { - logger.error(String.format("Error fetching compute service with id %1$s", computeServiceId)); - return generateJSONResponseMAV(false, null, "No compute/storage service with those ID's"); - } - - //Choose a storage service appropriate for the compute service (we could also make this configurable) - CloudStorageService css = null; - if (ccs instanceof CloudComputeServiceAws) { - css = getStorageService("amazon-aws-storage-sydney"); - } else if (ccs instanceof CloudComputeServiceNci) { - css = getStorageService("nci-gadi-storage"); - } else { - css = getStorageService("nectar-openstack-storage-melb"); - } - if (css == null) { - logger.error(String.format("Error fetching storage service linked to compute service id %1$s", computeServiceId)); - return generateJSONResponseMAV(false, null, "No linked storage service exists for your compute service selection"); - } - - //We may need to specify ARN details depending on the service we are using - STSRequirement stsReq = STSRequirement.ForceNone; - if (css instanceof CloudStorageServiceJClouds) { - stsReq = ((CloudStorageServiceJClouds) css).getStsRequirement(); - } - switch (stsReq) { - case Permissable: - if (StringUtils.isEmpty(user.getArnStorage())) { - job.setStorageBucket(css.getBucket()); - } else { - job.setStorageBucket(user.getS3Bucket()); - } - break; - case Mandatory: - job.setStorageBucket(user.getS3Bucket()); - break; - case ForceNone: - job.setStorageBucket(css.getBucket()); - break; - } - - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId(css.getId()); - job.setStorageBaseKey(css.generateBaseKey(job)); - } else { - job.setComputeServiceId(null); - job.setStorageServiceId(null); - job.setStorageBaseKey(null); - } - - //Save the VEGL job - try { - jobManager.saveJob(job); - } catch (Exception ex) { - logger.error("Error updating job " + job, ex); - return generateJSONResponseMAV(false, null, "Error saving job"); - } - - return generateJSONResponseMAV(true, Arrays.asList(job), ""); - } - - /** - * Given an entire job object this function attempts to save the specified job with ID - * to the internal database. If the Job DNE (or id is null), the job will be created and - * have it's staging area initialised and other creation specific tasks performed. - * - * @return A JSON object with a success attribute that indicates whether - * the job was successfully updated. 
The data object will contain the updated job - * @return - * @throws ParseException - */ - @RequestMapping("/secure/updateJobSeries.do") - public ModelAndView updateJobSeries(@RequestParam(value="id", required=true) Integer id, //The integer ID if not specified will trigger job creation - @RequestParam(value="folderName", required=true) String folderName, //Name of the folder to move to - HttpServletRequest request) { - PortalUser user = userService.getLoggedInUser(); - //Get our job - VEGLJob job = null; - Integer seriesId=null; - List series = jobManager.querySeries(user.getEmail(), folderName, null); - if(series.isEmpty()){ - return generateJSONResponseMAV(false, null,"Series not found"); - }else{ - seriesId=series.get(0).getId(); - } - - try { - job = attemptGetJob(id, user); - } catch (AccessDeniedException e) { - throw e; - } catch (Exception ex) { - logger.error(String.format("Error creating/fetching job with id %1$s", id), ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + id); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - //Update our job from the request parameters - job.setSeriesId(seriesId); - - //Save the VEGL job - try { - jobManager.saveJob(job); - } catch (Exception ex) { - logger.error("Error updating series for job " + job, ex); - return generateJSONResponseMAV(false, null, "Error updating series"); - } - - return generateJSONResponseMAV(true, Arrays.asList(job), ""); - } - - /** - * Given a job with specified ID and a list of download objects, - * save the download objects to the database. - * - * The download objects are defined piecewise as an array of name/description/url and localPath values. - * - * The Nth download object will be defined as a combination of - * names[N], descriptions[N], urls[N] and localPaths[N] - * - * @param append If true, the parsed downloaded will append themselves to the existing job. 
If false, they will replace all downloads for the existing job - * @return - * @throws ParseException - */ - @RequestMapping("/secure/updateJobDownloads.do") - public ModelAndView updateJobDownloads(@RequestParam("id") Integer id, //The integer ID is the only required value - @RequestParam(required=false, value="append", defaultValue="false") String appendString, - @RequestParam("name") String[] names, - @RequestParam("description") String[] descriptions, - @RequestParam("url") String[] urls, - @RequestParam("localPath") String[] localPaths, - @RequestParam(required=false, name="northBoundLatitude") Double[] northBoundLatitudes, - @RequestParam(required=false, name="eastBoundLongitude") Double[] eastBoundLongitudes, - @RequestParam(required=false, name="southBoundLatitude") Double[] southBoundLatitudes, - @RequestParam(required=false, name="westBoundLongitude") Double[] westBoundLongitudes) { - PortalUser user = userService.getLoggedInUser(); - boolean append = Boolean.parseBoolean(appendString); - - List parsedDownloads = new ArrayList<>(); - for (int i = 0; i < urls.length && i < names.length && i < descriptions.length && i < localPaths.length; i++) { - VglDownload newDl = new VglDownload(); - newDl.setName(names[i]); - newDl.setDescription(descriptions[i]); - newDl.setUrl(urls[i]); - newDl.setLocalPath(localPaths[i]); - if(northBoundLatitudes != null) - newDl.setNorthBoundLatitude(northBoundLatitudes[i]); - if(eastBoundLongitudes != null) - newDl.setEastBoundLongitude(eastBoundLongitudes[i]); - if(southBoundLatitudes != null) - newDl.setSouthBoundLatitude(southBoundLatitudes[i]); - if(eastBoundLongitudes != null) - newDl.setWestBoundLongitude(westBoundLongitudes[i]); - parsedDownloads.add(newDl); - } - - //Lookup the job - VEGLJob job; - try { - job = attemptGetJob(id, user); - } catch (AccessDeniedException e) { - throw e; - } catch (Exception ex) { - logger.error("Error looking up job with id " + id + " :" + ex.getMessage()); - logger.debug("Exception:", ex); - return generateJSONResponseMAV(false, null, "Unable to access job"); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - if (append) { - List existingDownloads = new ArrayList(); - for(VglDownload dl: job.getJobDownloads()) { - VglDownload dlClone = (VglDownload)dl.clone(); - //dlClone.setId(null); - existingDownloads.add(dlClone); - } - existingDownloads.addAll(parsedDownloads); - job.setJobDownloads(existingDownloads); - /* - List existingDownloads = job.getJobDownloads(); - existingDownloads.addAll(parsedDownloads); - job.setJobDownloads(existingDownloads); - */ - } else { - // Carsten 17/06/2019: Should no longer be necessary if setJobDownloads now does its job properly: - // jobManager.deleteJobDownloads(job); - job.setJobDownloads(parsedDownloads); - } - - //Save the VEGL job - try { - jobManager.saveJob(job); - } catch (Exception ex) { - logger.error("Error updating job downloads" + job, ex); - return generateJSONResponseMAV(false, null, "Error saving job"); - } - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Given the ID of a job - lookup the appropriate job object and associated list of downloads objects. - * - * Return them as an array of JSON serialised VglDownload objects. 
- * @param jobId - * @return - */ - @RequestMapping("/secure/getJobDownloads.do") - public ModelAndView getJobDownloads(@RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - //Lookup the job - VEGLJob job; - try { - job = attemptGetJob(jobId, user); - } catch (Exception ex) { - logger.error("Error looking up job with id " + jobId + " :" + ex.getMessage()); - logger.debug("Exception:", ex); - return generateJSONResponseMAV(false, null, "Unable to access job"); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - return generateJSONResponseMAV(true, job.getJobDownloads(), ""); - } - - - // /** - // * Gets the list of authorised images for the specified job owned by user - // * @param request The request (from a user) making the query - // * @param job The job for which the images will be tested - // * @return - // */ - // private List getImagesForJobAndUser(HttpServletRequest request, VEGLJob job) { - // return getImagesForJobAndUser(request, job.getComputeServiceId()); - // } - - /** - * Gets the list of authorised images for the specified job owned by user - * @param request The request (from a user) making the query - * @param computeServiceId The compute service ID to search for images - * @return - */ - private List getImagesForJobAndUser(HttpServletRequest request, String computeServiceId) { - CloudComputeService ccs = getComputeService(computeServiceId); - if (ccs == null) { - return new ArrayList<>(); - } - - List authorisedImages = new ArrayList<>(); - - for (MachineImage img : ccs.getAvailableImages()) { - if (img instanceof VglMachineImage) { - //If the image has no permission restrictions, add it. Otherwise - //ensure that the user has a role matching something in the image permission list - String[] permissions = ((VglMachineImage) img).getPermissions(); - if (permissions == null) { - authorisedImages.add(img); - } else { - for (String validRole : permissions) { - if (request.isUserInRole(validRole)) { - authorisedImages.add(img); - break; - } - } - } - } else { - authorisedImages.add(img); - } - } - - return authorisedImages; - } - - /** - * Processes a job submission request. - * - * @param request The servlet request - * @param response The servlet response - * - * @return A JSON object with a success attribute that indicates whether - * the job was successfully submitted. 
- */ - @RequestMapping("/secure/submitJob.do") - public ModelAndView submitJob(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") String jobId) { - PortalUser user = userService.getLoggedInUser(); - boolean succeeded = false; - String oldJobStatus = null, errorDescription = null, errorCorrection = null; - VEGLJob curJob = null; - boolean containsPersistentVolumes = false; - - try { - // Get our job - curJob = attemptGetJob(Integer.parseInt(jobId), user); - if (curJob == null) { - logger.error("Error fetching job with id " + jobId); - errorDescription = "There was a problem retrieving your job from the database."; - String admin = getAdminEmail(); - if(TextUtil.isNullOrEmpty(admin)) { - admin = "the portal admin"; - } - errorCorrection = "Please try again in a few minutes or report it to "+admin+"."; - return generateJSONResponseMAV(false, null, errorDescription, errorCorrection); - } else { - - CloudStorageService cloudStorageService = getStorageService(curJob); - CloudComputeService cloudComputeService = getComputeService(curJob); - if (cloudStorageService == null || cloudComputeService == null) { - errorDescription = "One of the specified storage/compute services cannot be found."; - errorCorrection = "Consider changing the selected compute or storage service."; - return generateJSONResponseMAV(false, null, errorDescription, errorCorrection); - } - - // we need to keep track of old job for audit trail purposes - oldJobStatus = curJob.getStatus(); - // Assume user has permission since we're using images from the SSC - boolean permissionGranted = true; - - // // check to ensure user has permission to run the job - // // boolean permissionGranted = false; - - // String jobImageId = curJob.getComputeVmId(); - // List images = getImagesForJobAndUser(request, curJob); - // for (MachineImage vglMachineImage : images) { - // if (vglMachineImage.getImageId().equals(jobImageId)) { - // permissionGranted = true; - // break; - // } - // } - - if (permissionGranted) { - // Right before we submit - pump out a script file for downloading every VglDownload object when the VM starts - if (!createDownloadScriptFile(curJob, DOWNLOAD_SCRIPT)) { - logger.error(String.format("Error creating download script '%1$s' for job with id %2$s", DOWNLOAD_SCRIPT, jobId)); - errorDescription = "There was a problem configuring the data download script."; - String admin = getAdminEmail(); - if(TextUtil.isNullOrEmpty(admin)) { - admin = "the portal admin"; - } - errorCorrection = "Please try again in a few minutes or report it to "+admin+"."; - } else { - // copy files to S3 storage for processing - // get job files from local directory - StagedFile[] stagedFiles = fileStagingService.listStageInDirectoryFiles(curJob); - if (stagedFiles.length == 0) { - errorDescription = "There wasn't any input files found for submitting your job for processing."; - errorCorrection = "Please upload your input files and try again."; - } else { - // Upload them to storage - File[] files = new File[stagedFiles.length]; - for (int i = 0; i < stagedFiles.length; i++) { - files[i] = stagedFiles[i].getFile(); - } - - cloudStorageService.uploadJobFiles(curJob, files); - - // create our input user data string - String userDataString = null; - userDataString = createBootstrapForJob(curJob); - - // Provenance - anvglProvenanceService.setServerURL(request.getRequestURL().toString()); - anvglProvenanceService.createActivity(curJob, scmEntryService.getJobSolutions(curJob), user); - - //ANVGL-120 Check for persistent 
volumes - if (cloudComputeService instanceof CloudComputeServiceAws) { - containsPersistentVolumes = ((CloudComputeServiceAws) cloudComputeService).containsPersistentVolumes(curJob); - curJob.setContainsPersistentVolumes(containsPersistentVolumes); - } - - oldJobStatus = curJob.getStatus(); - curJob.setStatus(JobBuilderController.STATUS_PROVISION); - jobManager.saveJob(curJob); - jobManager.createJobAuditTrail(oldJobStatus, curJob, "Set job to provisioning at " + cloudComputeService.getId()); - - cloudSubmissionService.queueSubmission(cloudComputeService, curJob, userDataString); - succeeded = true; - } - } - } else { - errorDescription = "You do not have the permission to submit this job for processing."; - String admin = getAdminEmail(); - if(TextUtil.isNullOrEmpty(admin)) { - admin = "the portal admin"; - } - errorCorrection = "If you think this is wrong, please report it to "+admin+"."; - } - } - } catch (PortalServiceException e) { - errorDescription = e.getMessage(); - errorCorrection = e.getErrorCorrection(); - - //These are our "STS specific" overrides to some error messages (not an ideal solution but I don't want to have to overhaul everything just to tweak a string). - if (errorDescription.equals("Storage credentials are not valid.")) { - errorDescription = "Unable to upload job script and/or input files"; - errorCorrection = "The most likely cause is that your user profile ARN's have been misconfigured."; - } - } catch (IOException e) { - logger.error("Job bootstrap creation failed.", e); - errorDescription = "There was a problem creating startup script."; - errorCorrection = "Please report this error to "+getAdminEmail(); - } catch (AccessDeniedException e) { - logger.error("Job submission failed.", e); - if (curJob == null) { - errorDescription = "You are not authorized to access the specified job"; - } else { - errorDescription = "You are not authorized to access the specified job with id: "+ curJob.getId(); - } - errorCorrection = "Please report this error to "+getAdminEmail(); - } catch (Exception e) { - logger.error("Job submission failed.", e); - errorDescription = "An unexpected error has occurred while submitting your job for processing."; - errorCorrection = "Please report this error to "+getAdminEmail(); - } - - if (succeeded) { - ModelMap responseModel = new ModelMap(); - responseModel.put("containsPersistentVolumes", containsPersistentVolumes); - return generateJSONResponseMAV(true, responseModel, ""); - } else { - jobManager.createJobAuditTrail(oldJobStatus, curJob, errorDescription); - return generateJSONResponseMAV(false, null, errorDescription, errorCorrection); - } - } - - /** - * Creates a new VEGL job initialised with the default configuration values. The job will be persisted into the database. - * - * The Job MUST be associated with a specific compute and storage service. Staging areas and other bits and pieces relating to the job will also be initialised. 
- * - * @param email - * @return - */ - private VEGLJob initialiseVEGLJob(HttpSession session, PortalUser user) throws PortalServiceException { - VEGLJob job = new VEGLJob(); - - //Start by saving our job to set its ID - jobManager.saveJob(job); - log.debug(String.format("Created a new job row id=%1$s", job.getId())); - - job.setComputeInstanceKey(user.getAwsKeyName()); - job.setStorageBucket(user.getS3Bucket()); - - //Iterate over all session variables - set them up as job parameters - @SuppressWarnings("rawtypes") - final - Enumeration sessionVariables = session.getAttributeNames(); - Map jobParams = new HashMap(); - while (sessionVariables.hasMoreElements()) { - String variableName = sessionVariables.nextElement().toString(); - Object variableValue = session.getAttribute(variableName); - String variableStringValue = null; - ParameterType variableType = null; - - //Only session variables of a number or string will be considered - if (variableValue instanceof Integer || variableValue instanceof Double) { - variableType = ParameterType.number; - variableStringValue = variableValue.toString(); - } else if (variableValue instanceof String) { - variableType = ParameterType.string; - variableStringValue = (String) variableValue; - } else { - continue;//Don't bother with other types - } - - VglParameter param = new VglParameter(); - param.setName(variableName); - param.setValue(variableStringValue); - param.setType(variableType.name()); - jobParams.put(variableName, param); - } - job.setJobParameters(jobParams); - - //Load details from - job.setUser(user.getEmail()); - job.setEmailAddress(user.getEmail()); - // Get keypair name from CloudComputeService later - // job.setComputeInstanceKey("vgl-developers"); - job.setName("VL-Job " + new Date().toString()); - job.setDescription(""); - job.setStatus(STATUS_UNSUBMITTED); - job.setSubmitDate(new Date()); - - // TODO: No longer using session vars - //Transfer the 'session downloads' into actual download objects associated with a job - @SuppressWarnings("unchecked") - final - List erddapDownloads = (List) session.getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST); - session.setAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST, null); //ensure we clear the list out in case the user makes more jobs - if (erddapDownloads != null) { - job.setJobDownloads(new ArrayList<>(erddapDownloads)); - } else { - logger.warn("No downloads configured for user session!"); - } - - //Save our job to the database before setting up staging directories (we need an ID!!) - jobManager.saveJob(job); - jobManager.createJobAuditTrail(null, job, "Job created."); - - //Finally generate our stage in directory for persisting inputs - fileStagingService.generateStageInDirectory(job); - - return job; - } - - /** - * This function creates a file "vgl_download.sh" which contains the bash script - * for downloading every VglDownload associated with the specified job. 
- * - * The script file will be written to the staging area for job as - * @param job The job to generate - * @param fileName the file name of the generated script - * @return - */ - private boolean createDownloadScriptFile(VEGLJob job, String fileName) { - try (OutputStream os = fileStagingService.writeFile(job, fileName); - OutputStreamWriter out = new OutputStreamWriter(os)) { - - for (VglDownload dl : job.getJobDownloads()) { - out.write(String.format("#Downloading %1$s\n", dl.getName())); - out.write(String.format("curl -f -L '%1$s' -o \"%2$s\"\n", dl.getUrl(), dl.getLocalPath())); - } - - return true; - } catch (Exception e) { - logger.error("Error creating download script" + e.getMessage()); - logger.debug("Error:", e); - return false; - } - } - - /** - * Request wrapper to get the default toolbox uri. - * - */ - @RequestMapping("/getDefaultToolbox.do") - public ModelAndView doGetDefaultToolbox() { - return generateJSONResponseMAV(true, new String[] {getDefaultToolbox()}, ""); - } - - /** - * Gets the set of cloud images available for use by a particular user. - * - * If jobId is specified, limit the set to images that are - * compatible with the solution selected for the job. - * - * If a list of solution ids is provided, limit the set of images to the - * ones that can run a job comprising the specified solutions. - * - * If both jobId and solutions are specified, use the list of solutions to - * determine which images to return. - * - * @param request - * @param computeServiceId - * @param jobId (optional) id of a job to limit suitable images - * @return - */ - @RequestMapping("/secure/getVmImagesForComputeService.do") - public ModelAndView getImagesForComputeService( - HttpServletRequest request, - @RequestParam("computeServiceId") String computeServiceId, - @RequestParam(value="jobId", required=false) Integer jobId, - @RequestParam(value="solutions", required=false) List solutions) { - PortalUser user = userService.getLoggedInUser(); - try { - Map> imageProviders = null; - - if (solutions != null && solutions.size() > 0) { - imageProviders = scmEntryService.getJobImages(solutions, user); - } - else if (jobId != null) { - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false); - } - - // Filter list to images suitable for job solutions, if specified. - imageProviders = scmEntryService.getJobImages(job, user); - } - - Set images = null; - - if (imageProviders != null) { - images = imageProviders.get(computeServiceId); - } - - if(images == null) { - images= new HashSet(); - } - - if (images.isEmpty()) { - // Fall back on old behaviour based on configured images for now - // Get images available to the current user - images.addAll(getImagesForJobAndUser(request, computeServiceId)); - } - - if (images.isEmpty()) { - // There are no suitable images at the specified compute service. - log.warn("No suitable images at compute service (" + computeServiceId + ") for job (" + jobId + ")"); - } - - // return result - return generateJSONResponseMAV(true, images, ""); - } catch (Exception ex) { - log.error("Unable to access image list:" + ex.getMessage(), ex); - return generateJSONResponseMAV(false); - } - } - - /** - * Return a JSON list of VM types available for the compute service. 
- * - * @param computeServiceId - */ - @RequestMapping("/secure/getVmTypesForComputeService.do") - public ModelAndView getTypesForComputeService(HttpServletRequest request, - @RequestParam("computeServiceId") String computeServiceId, - @RequestParam("machineImageId") String machineImageId) { - try { - CloudComputeService ccs = getComputeService(computeServiceId); - if (ccs == null) { - return generateJSONResponseMAV(false, null, "Unknown compute service: "+computeServiceId); - } - - ComputeType[] allTypes = ccs.getAvailableComputeTypes(machineImageId); - - return generateJSONResponseMAV(true, allTypes, ""); - } catch (Exception ex) { - log.error("Unable to access compute type list:" + ex.getMessage(), ex); - return generateJSONResponseMAV(false); - } - } - - /** - * Gets a JSON list of id/name pairs for every available compute service - * - * If a jobId parameter is provided, then return compute services - * compatible with that job. Currently that is only those services - * that have images available for the solution used for the job. - * - * Compute services that haven't been configured by the user will be ignored - * - * @param jobId (optional) job id to limit acceptable services - * @return - * @throws PortalServiceException - */ - @RequestMapping("/secure/getComputeServices.do") - public ModelAndView getComputeServices(@RequestParam(value="jobId", required=false) final Integer jobId) throws PortalServiceException { - - PortalUser user = userService.getLoggedInUser(); - Set jobCCSIds; - try { - jobCCSIds = scmEntryService.getJobProviders(jobId, user); - } catch (AccessDeniedException e) { - throw e; - } - - Set configuredServiceIds = new HashSet(); - getConfiguredComputeServices(user, nciDetailsService).stream().forEach(x -> configuredServiceIds.add(x.getId())); - - List simpleComputeServices = new ArrayList<>(); - - for (CloudComputeService ccs : cloudComputeServices) { - // Add the ccs to the list if it's valid for job or we have no job - if (jobCCSIds == null || jobCCSIds.contains(ccs.getId())) { - if (configuredServiceIds.contains(ccs.getId())) { - ModelMap map = new ModelMap(); - map.put("id", ccs.getId()); - map.put("name", ccs.getName()); - simpleComputeServices.add(map); - } - } - } - - return generateJSONResponseMAV(true, simpleComputeServices, ""); - } - - /** - * Gets a JSON list of id/name pairs for every available storage service - * @return - */ - @RequestMapping("/secure/getStorageServices.do") - public ModelAndView getStorageServices() { - List simpleStorageServices = new ArrayList<>(); - - for (CloudStorageService ccs : cloudStorageServices) { - ModelMap map = new ModelMap(); - map.put("id", ccs.getId()); - map.put("name", ccs.getName()); - simpleStorageServices.add(map); - } - - return generateJSONResponseMAV(true, simpleStorageServices, ""); - } - - /** - * A combination of getJobInputFiles and getJobDownloads. This function amalgamates the list - * of remote service downloads and local file uploads into a single list of input files that - * are available to a job at startup. 
- * - * The results will be presented in an array of VglDownload objects - * @param jobId - * @return - */ - @RequestMapping("/secure/getAllJobInputs.do") - public ModelAndView getAllJobInputs(@RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = null; - try { - job = attemptGetJob(jobId, user); - } catch (Exception ex) { - logger.error("Error fetching job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, "Error fetching job with id " + jobId); - } - - if (job == null) { - return generateJSONResponseMAV(false); - } - - //Get our files - StagedFile[] files = null; - try { - files = fileStagingService.listStageInDirectoryFiles(job); - } catch (Exception ex) { - logger.error("Error listing job stage in directory", ex); - return generateJSONResponseMAV(false, null, "Error reading job stage in directory"); - } - - //Load the staged files - List allInputs = new ArrayList<>(); - int idCounter = Integer.MIN_VALUE; - for (StagedFile file : files) { - //we need unique ids - this is our simple way of generating them (low likelyhood of collision) - //if we have a collision the GUI might not show one entry - it's not the end of the world - VglDownload dl = new VglDownload(idCounter++); - dl.setName(file.getName()); - dl.setLocalPath(file.getName()); - - allInputs.add(dl); - } - - //Load the job downloads - allInputs.addAll(job.getJobDownloads()); - - return generateJSONResponseMAV(true, allInputs, ""); - } - - - /** - * Gets a JSON list of id/name pairs for every available compute service for the - * given list of solution ids. - * - * Only those services that have images available for the solutions. - * - * Compute services that haven't been configured by the user will be ignored - * - * @param solutions List of solution ids for which the available compute services is to be returned - * - * @return List of compute services which have images for the given solutions - * - * @throws PortalServiceException - */ - @RequestMapping("/secure/getComputeServicesForSolutions.do") - public ModelAndView getComputeServicesForSolutions(@RequestParam(value="solutions", required=false) List solutions) - throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - - - Set configuredServiceIds = new HashSet(); - getConfiguredComputeServices(user, nciDetailsService).stream().forEach(x -> configuredServiceIds.add(x.getId())); - - List simpleComputeServices = new ArrayList<>(); - - if (solutions!=null && !solutions.isEmpty()) { - try { - Set ccsIds = scmEntryService.getJobProviders(solutions); - if (ccsIds != null) { - for (CloudComputeService ccs : cloudComputeServices) { - // Add the ccs to the list if it's valid for job or we have no job - if (ccsIds.contains(ccs.getId()) && configuredServiceIds.contains(ccs.getId())) { - ModelMap map = new ModelMap(); - map.put("id", ccs.getId()); - map.put("name", ccs.getName()); - simpleComputeServices.add(map); - } - } - } - } catch (AccessDeniedException e) { - throw e; - } - } - - return generateJSONResponseMAV(true, simpleComputeServices, ""); - } - - @ExceptionHandler(AccessDeniedException.class) - @ResponseStatus(value = org.springframework.http.HttpStatus.FORBIDDEN) - public @ResponseBody String handleException(AccessDeniedException e) { - return e.getMessage(); - } - - /** - * Return the default toolbox URI. - * - * @returns String with the URI for the default toolbox. 
- */ - public String getDefaultToolbox() { - return this.defaultToolbox; - } - - /** - * Set the default toolbox URI. - * - * @param defaultToolbox String containing the URI of the default toolbox to set - */ - public void setDefaultToolbox(String defaultToolbox) { - this.defaultToolbox = defaultToolbox; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/JobDownloadController.java b/src/main/java/org/auscope/portal/server/web/controllers/JobDownloadController.java deleted file mode 100644 index 6cdca0c98..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/JobDownloadController.java +++ /dev/null @@ -1,455 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.awt.Dimension; -import java.util.ArrayList; -import java.util.List; - -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.server.controllers.BasePortalController; -import org.auscope.portal.core.services.WCSService; -import org.auscope.portal.core.services.methodmakers.filter.FilterBoundingBox; -import org.auscope.portal.core.services.responses.csw.CSWGeographicBoundingBox; -import org.auscope.portal.core.services.responses.wcs.Resolution; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.web.service.SimpleWfsService; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Controller; -import org.springframework.ui.ModelMap; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.servlet.ModelAndView; - -/** - * A controller class for handling the creation of ERRDAP and other download requests - * @author Josh Vote - * - */ -@Controller -public class JobDownloadController extends BasePortalController { - - /** - * Name of the session variable where a List resides - */ - public static final String SESSION_DOWNLOAD_LIST = "jobDownloadList"; - - - protected final Log logger = LogFactory.getLog(getClass()); - private SimpleWfsService simpleWfsService; - private WCSService wcsService; - - - private String erddapServiceUrl; - - @Autowired - public JobDownloadController(SimpleWfsService simpleWfsService, WCSService wcsService, @Value("${cloud.erddapservice.url}") String erddapServiceUrl) { - this.simpleWfsService = simpleWfsService; - this.wcsService = wcsService; - this.erddapServiceUrl=erddapServiceUrl; - } - - private static ModelMap toView(VglDownload dl) { - ModelMap map = new ModelMap(); - map.put("url", dl.getUrl()); - map.put("northBoundLatitude", dl.getNorthBoundLatitude()); - map.put("southBoundLatitude", dl.getSouthBoundLatitude()); - map.put("eastBoundLongitude", dl.getEastBoundLongitude()); - map.put("westBoundLongitude", dl.getWestBoundLongitude()); - map.put("name", dl.getName()); - map.put("description", dl.getDescription()); - map.put("localPath", dl.getLocalPath()); - return map; - } - - /** - * // TODO: No longer using session vars - * - * Utility for adding a single VglDownload object to the session based array of VglDownload objects. 
- * @param request - * @param download - */ - private void addDownloadToSession(HttpServletRequest request, VglDownload download) { - @SuppressWarnings("unchecked") - List erddapUrlList = (List) request.getSession().getAttribute(SESSION_DOWNLOAD_LIST); - if (erddapUrlList == null) { - erddapUrlList = new ArrayList<>(); - } - - logger.trace("Adding download: " + download.getUrl()); - synchronized(erddapUrlList) { - erddapUrlList.add(download); - } - - request.getSession().setAttribute(SESSION_DOWNLOAD_LIST, erddapUrlList); - } - - /** - * Creates a new VL Download object from a remote URL. The Download object is returned. If saveSession - * is true the download object will also be saved to the session wide SESSION_DOWNLOAD_LIST list. - * @return - */ - @RequestMapping("/makeDownloadUrl.do") - public ModelAndView makeDownloadUrl(@RequestParam("url") String url, - @RequestParam("name") String name, - @RequestParam("description") String description, - @RequestParam(required = false, value = "fullDescription") final String fullDescription, - @RequestParam("localPath") String localPath, - @RequestParam("northBoundLatitude") final Double northBoundLatitude, - @RequestParam("eastBoundLongitude") final Double eastBoundLongitude, - @RequestParam("southBoundLatitude") final Double southBoundLatitude, - @RequestParam("westBoundLongitude") final Double westBoundLongitude, - //@RequestParam(required = false, value = "parentName") String parentName, - //@RequestParam(required = false, value = "parentUrl") String parentUrl, - //@RequestParam(required = false, value = "owner") String owner, - @RequestParam(required=false,defaultValue="false",value="saveSession") final boolean saveSession, - HttpServletRequest request) { - - VglDownload newDownload = new VglDownload(); - newDownload.setName(name); - newDownload.setDescription(fullDescription == null ? description : fullDescription); - newDownload.setLocalPath(localPath); - newDownload.setUrl(url); - newDownload.setNorthBoundLatitude(northBoundLatitude); - newDownload.setEastBoundLongitude(eastBoundLongitude); - newDownload.setSouthBoundLatitude(southBoundLatitude); - newDownload.setWestBoundLongitude(westBoundLongitude); - /* - newDownload.setOwner(owner); - newDownload.setParentName(parentName); - newDownload.setParentUrl(parentUrl); - */ - - if (saveSession) { - addDownloadToSession(request, newDownload); - } - - return generateJSONResponseMAV(true, toView(newDownload), ""); - } - - /** - * Creates a new VL Download object from a some ERDDAP parameters. The Download object is returned. If saveSession - * is true the download object will also be saved to the session wide SESSION_DOWNLOAD_LIST list. 
- * @return - * @throws Exception - */ - @RequestMapping("/makeErddapUrl.do") - public ModelAndView makeErddapUrl(@RequestParam("northBoundLatitude") final Double northBoundLatitude, - @RequestParam("eastBoundLongitude") final Double eastBoundLongitude, - @RequestParam("southBoundLatitude") final Double southBoundLatitude, - @RequestParam("westBoundLongitude") final Double westBoundLongitude, - @RequestParam("format") final String format, - @RequestParam("layerName") final String layerName, - @RequestParam("name") final String name, - @RequestParam("description") final String description, - @RequestParam(required = false, value = "fullDescription") final String fullDescription, - @RequestParam("localPath") final String localPath, - //@RequestParam(required = false, value = "parentName") String parentName, - //@RequestParam(required = false, value = "parentUrl") String parentUrl, - //@RequestParam(required = false, value = "owner") String owner, - @RequestParam(required=false,defaultValue="false",value="saveSession") final boolean saveSession, - HttpServletRequest request, - HttpServletResponse response) { - - String serviceUrl = erddapServiceUrl; - CSWGeographicBoundingBox bbox = new CSWGeographicBoundingBox(westBoundLongitude, eastBoundLongitude, southBoundLatitude, northBoundLatitude); - String erddapUrl = getCoverageSubsetUrl(bbox, serviceUrl, layerName, format); - - // Append this download list to the existing list of download objects - VglDownload newDownload = new VglDownload(); - newDownload.setName(name); - newDownload.setDescription(fullDescription == null ? description : fullDescription); - newDownload.setLocalPath(localPath); - newDownload.setUrl(erddapUrl); - newDownload.setNorthBoundLatitude(northBoundLatitude); - newDownload.setEastBoundLongitude(eastBoundLongitude); - newDownload.setSouthBoundLatitude(southBoundLatitude); - newDownload.setWestBoundLongitude(westBoundLongitude); - /* - newDownload.setOwner(owner); - newDownload.setParentName(parentName); - newDownload.setParentUrl(parentUrl); - */ - - if (saveSession) { - addDownloadToSession(request, newDownload); - } - - return generateJSONResponseMAV(true, toView(newDownload), ""); - } - - /** - * Creates a new VGL Download object from some NCSS parameters. The Download object is returned. If saveSession - * is true the download object will also be saved to the session wide SESSION_DOWNLOAD_LIST list. 
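makeErddapUrl above delegates URL construction to getCoverageSubsetUrl, shown further down in this file. A runnable sketch of the griddap subset URL it assembles; the service URL and layer name here are hypothetical.

```java
public class ErddapSubsetUrlSketch {
    /** Griddap layer + format plus an encoded [(south):1:(north)][(west):1:(east)] selector, as in getCoverageSubsetUrl. */
    static String coverageSubsetUrl(String serviceUrl, String layerName, String format,
                                    double north, double south, double east, double west) {
        String dims = "%5B(" + south + "):1:(" + north + ")%5D%5B(" + west + "):1:(" + east + ")%5D";
        return serviceUrl + layerName + "." + format + "?" + layerName + dims;
    }

    public static void main(String[] args) {
        // Hypothetical ERDDAP endpoint and layer name, purely for illustration.
        System.out.println(coverageSubsetUrl("https://example.org/erddap/griddap/",
                "gravity_grid", "nc", -20.0, -30.0, 140.0, 130.0));
        // -> https://example.org/erddap/griddap/gravity_grid.nc?gravity_grid%5B(-30.0):1:(-20.0)%5D%5B(130.0):1:(140.0)%5D
    }
}
```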
- * @return - * @throws Exception - */ - - @RequestMapping("/makeNetcdfsubseserviceUrl.do") - public ModelAndView makeNetcdfsubsetserviceUrl(@RequestParam("url") String url, - @RequestParam("northBoundLatitude") final Double northBoundLatitude, - @RequestParam("eastBoundLongitude") final Double eastBoundLongitude, - @RequestParam("southBoundLatitude") final Double southBoundLatitude, - @RequestParam("westBoundLongitude") final Double westBoundLongitude, - @RequestParam("name") final String name, - @RequestParam("description") final String description, - @RequestParam(required = false, value = "fullDescription") final String fullDescription, - @RequestParam("localPath") final String localPath, - //@RequestParam(required = false, value = "parentName") String parentName, - //@RequestParam(required = false, value = "parentUrl") String parentUrl, - //@RequestParam(required = false, value = "owner") String owner, - @RequestParam(required=false,defaultValue="false",value="saveSession") final boolean saveSession, - HttpServletRequest request, - HttpServletResponse response) { - - //String serviceUrl = hostConfigurer.resolvePlaceholder("HOST.erddapservice.url"); - CSWGeographicBoundingBox bbox = new CSWGeographicBoundingBox(westBoundLongitude, eastBoundLongitude, southBoundLatitude, northBoundLatitude); - String netcdfsubsetserviceUrl = getNetcdfSubsetUrl(bbox, url, name, description, "nc"); - - // Append this download list to the existing list of download objects - VglDownload newDownload = new VglDownload(); - newDownload.setName(name); - newDownload.setDescription(fullDescription == null ? description : fullDescription); - newDownload.setLocalPath(localPath); - newDownload.setUrl(netcdfsubsetserviceUrl); - newDownload.setNorthBoundLatitude(northBoundLatitude); - newDownload.setEastBoundLongitude(eastBoundLongitude); - newDownload.setSouthBoundLatitude(southBoundLatitude); - newDownload.setWestBoundLongitude(westBoundLongitude); - /* - newDownload.setOwner(owner); - newDownload.setParentName(parentName); - newDownload.setParentUrl(parentUrl); - */ - - if (saveSession) { - addDownloadToSession(request, newDownload); - } - - return generateJSONResponseMAV(true, toView(newDownload), ""); - } - - - /** - * Creates a new VL Download object from some WFS parameters. The Download object is returned. If saveSession - * is true the download object will also be saved to the session wide SESSION_DOWNLOAD_LIST list. 
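The NetCDF Subset Service handler above ultimately produces a var= selector plus a spatial bounding-box clause via getNetcdfSubsetUrl, shown near the end of this file. A standalone sketch of that query string, using a hypothetical NCSS endpoint and variable name.

```java
public class NetcdfSubsetUrlSketch {
    /** A var= selector plus a spatial=bb bounding-box clause, mirroring the deleted getNetcdfSubsetUrl. */
    static String subsetUrl(String serviceUrl, String variable,
                            double north, double south, double east, double west) {
        return serviceUrl + "?var=" + variable
                + "&spatial=bb"
                + "&north=" + north
                + "&south=" + south
                + "&west=" + west
                + "&east=" + east;
    }

    public static void main(String[] args) {
        // Hypothetical NCSS endpoint and variable name, for illustration only.
        System.out.println(subsetUrl("https://example.org/thredds/ncss/grid/magnetics", "tmi",
                -20.0, -30.0, 140.0, 130.0));
    }
}
```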
- * - * @param serviceUrl The WFS endpoint - * @param featureType The feature type name to query - * @param maxFeatures [Optional] The maximum number of features to query - */ - @RequestMapping("/makeWfsUrl.do") - public ModelAndView makeWfsUrl(@RequestParam("serviceUrl") final String serviceUrl, - @RequestParam("featureType") final String featureType, - @RequestParam(required = false, value = "srsName") final String srsName, - @RequestParam(required = false, value = "crs") final String bboxCrs, - @RequestParam(required = false, value = "northBoundLatitude") final Double northBoundLatitude, - @RequestParam(required = false, value = "southBoundLatitude") final Double southBoundLatitude, - @RequestParam(required = false, value = "eastBoundLongitude") final Double eastBoundLongitude, - @RequestParam(required = false, value = "westBoundLongitude") final Double westBoundLongitude, - @RequestParam(required = false, value = "outputFormat") final String outputFormat, - @RequestParam(required = false, value = "maxFeatures") Integer maxFeatures, - @RequestParam("name") final String name, - @RequestParam("description") final String description, - @RequestParam(required = false, value = "fullDescription") final String fullDescription, - @RequestParam("localPath") final String localPath, - //@RequestParam(required = false, value = "parentName") String parentName, - //@RequestParam(required = false, value = "parentUrl") String parentUrl, - //@RequestParam(required = false, value = "owner") String owner, - @RequestParam(required=false,defaultValue="false",value="saveSession") final boolean saveSession, - HttpServletRequest request) { - - FilterBoundingBox bbox = null; - if (northBoundLatitude != null) { - bbox = FilterBoundingBox.parseFromValues(bboxCrs, northBoundLatitude, southBoundLatitude, eastBoundLongitude, westBoundLongitude); - } - - String response = null; - - try { - response = simpleWfsService.getFeatureRequestAsString(serviceUrl, featureType, bbox, maxFeatures, srsName, outputFormat); - } catch (Exception ex) { - log.warn(String.format("Exception generating service request for '%2$s' from '%1$s': %3$s", serviceUrl, featureType, ex)); - log.debug("Exception: ", ex); - return generateExceptionResponse(ex, serviceUrl); - } - - VglDownload newDownload = new VglDownload(); - newDownload.setName(name); - newDownload.setDescription(fullDescription == null ? description : fullDescription); - newDownload.setLocalPath(localPath); - newDownload.setUrl(response); - newDownload.setNorthBoundLatitude(northBoundLatitude); - newDownload.setEastBoundLongitude(eastBoundLongitude); - newDownload.setSouthBoundLatitude(southBoundLatitude); - newDownload.setWestBoundLongitude(westBoundLongitude); - /* - newDownload.setOwner(owner); - newDownload.setParentName(parentName); - newDownload.setParentUrl(parentUrl); - */ - - if (saveSession) { - addDownloadToSession(request, newDownload); - } - - return generateJSONResponseMAV(true, toView(newDownload), ""); - } - - /** - * Creates a new VL Download object from some WCS parameters. The Download object is returned. If saveSession - * is true the download object will also be saved to the session wide SESSION_DOWNLOAD_LIST list. - * - * TODO: Do we need time constraint (TimeConstraint) and custom params (Map)? 
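makeWfsUrl above hands request construction to SimpleWfsService, whose API is not shown in this diff, so the sketch below only illustrates the standard WFS GetFeature KVP parameters such a request involves. The endpoint, feature type, and bbox are hypothetical, and bbox axis order depends on the CRS and WFS version.

```java
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class WfsGetFeatureUrlSketch {
    /** A generic WFS 1.1.0 GetFeature KVP request with typeName, maxFeatures, srsName and an optional bbox. */
    static String getFeatureUrl(String serviceUrl, String typeName, int maxFeatures,
                                String srsName, String bbox) {
        String sep = serviceUrl.contains("?") ? "&" : "?";
        return serviceUrl + sep
                + "service=WFS&version=1.1.0&request=GetFeature"
                + "&typeName=" + URLEncoder.encode(typeName, StandardCharsets.UTF_8)
                + "&maxFeatures=" + maxFeatures
                + "&srsName=" + URLEncoder.encode(srsName, StandardCharsets.UTF_8)
                + (bbox == null ? "" : "&bbox=" + bbox);
    }

    public static void main(String[] args) {
        // Hypothetical endpoint and feature type.
        System.out.println(getFeatureUrl("https://example.org/geoserver/wfs", "gsml:Borehole",
                200, "EPSG:4326", "130.0,-30.0,140.0,-20.0,EPSG:4326"));
    }
}
```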
- * - * @param serviceUrl The WCS endpoint - * @param coverageName - * @param format - * @param outputCrs - * @param outputWidth - * @param outputHeight - * @param outputResolutionX - * @param outputResolutionY - * @param inputCrs - * @param northBoundLatitude - * @param southBoundLatitude - * @param eastBoundLongitude - * @param westBoundLongitude - * @param name - * @param description - * @param fullDescription - * @param localPath - * @param saveSession - * @param request - * @return - */ - @RequestMapping("/makeWcsUrl.do") - public ModelAndView makeWcsUrl(@RequestParam("serviceUrl") final String serviceUrl, - @RequestParam("coverageName") final String coverageName, - @RequestParam(required = false, value = "format") final String format, - @RequestParam(required = false, value = "crs") final String inputCrs, - @RequestParam(required = false, value = "outputCrs") final String outputCrs, - @RequestParam(required = false, value = "outputWidth") final Integer outputWidth, - @RequestParam(required = false, value = "outputHeight") final Integer outputHeight, - @RequestParam(required = false, value = "outputResolutionX") final Double outputResolutionX, - @RequestParam(required = false, value = "outputResolutionY") final Double outputResolutionY, - @RequestParam(required = false, value = "northBoundLatitude") final Double northBoundLatitude, - @RequestParam(required = false, value = "southBoundLatitude") final Double southBoundLatitude, - @RequestParam(required = false, value = "eastBoundLongitude") final Double eastBoundLongitude, - @RequestParam(required = false, value = "westBoundLongitude") final Double westBoundLongitude, - @RequestParam("name") final String name, - @RequestParam("description") final String description, - @RequestParam(required = false, value = "fullDescription") final String fullDescription, - @RequestParam("localPath") final String localPath, - @RequestParam(required=false,defaultValue="false",value="saveSession") final boolean saveSession, - HttpServletRequest request) { - - Dimension outputSize = null; - if(outputWidth != null && outputHeight != null) { - outputSize = new Dimension(outputWidth, outputHeight); - } - - Resolution outputResolution = null; - if(outputResolutionX != null && outputResolutionY != null) { - outputResolution = new Resolution(outputResolutionX, outputResolutionY); - } - - CSWGeographicBoundingBox bbox = null; - if (northBoundLatitude != null) { - bbox = new CSWGeographicBoundingBox(westBoundLongitude, eastBoundLongitude, southBoundLatitude, northBoundLatitude); - } - - String response = null; - try { - response = wcsService.getCoverageRequestAsString(serviceUrl, coverageName, format, outputCrs, outputSize, outputResolution, inputCrs, bbox, null, null); - } catch (Exception ex) { - log.warn(String.format("Exception generating service request for '%2$s' from '%1$s': %3$s", serviceUrl, coverageName, ex)); - log.debug("Exception: ", ex); - return generateExceptionResponse(ex, serviceUrl); - } - - VglDownload newDownload = new VglDownload(); - newDownload.setName(name); - newDownload.setDescription(fullDescription == null ? 
description : fullDescription); - newDownload.setLocalPath(localPath); - newDownload.setUrl(response); - newDownload.setNorthBoundLatitude(northBoundLatitude); - newDownload.setEastBoundLongitude(eastBoundLongitude); - newDownload.setSouthBoundLatitude(southBoundLatitude); - newDownload.setWestBoundLongitude(westBoundLongitude); - - if (saveSession) { - addDownloadToSession(request, newDownload); - } - - return generateJSONResponseMAV(true, toView(newDownload), ""); - } - - /** - * Get the number of download requests stored in user session. This method - * will be used by VL frontend to check if any data set has been captured - * before creating a new job. - * - * @param request The servlet request with query parameters - * @return number of download requests in user session. - */ - @RequestMapping("/getNumDownloadRequests.do") - public ModelAndView getNumDownloadRequests(HttpServletRequest request) { - int size = 0; - List downloadList = (List)request.getSession().getAttribute(SESSION_DOWNLOAD_LIST); - if (downloadList != null && downloadList.size() > 0) { - size = downloadList.size(); - } - return generateJSONResponseMAV(true, size, ""); - } - - /** - * Takes the co-ordinates of a user drawn bounding box and constructs an ERDDAP - * coverage subset request URL. - * - * @param coords The lat/lon co-ordinates of the user drawn bounding box - * @param serviceUrl The remote URL to query - * @param layerName The coverage layername to request - * @return The ERDDAP coverage subset request URL - */ - private String getCoverageSubsetUrl(CSWGeographicBoundingBox bbox, String serviceUrl, String layerName, String format) { - logger.debug(String.format("serviceUrl='%1$s' bbox='%2$s' layerName='%3$s'", serviceUrl, bbox, layerName)); - - // convert bbox co-ordinates to ERDDAP an ERDDAP dimension string - String erddapDimensions = "%5B("+ bbox.getSouthBoundLatitude() +"):1:("+ bbox.getNorthBoundLatitude() + - ")%5D%5B("+ bbox.getWestBoundLongitude() +"):1:("+ bbox.getEastBoundLongitude() +")%5D"; - - String url = serviceUrl + layerName + "." + format + "?" + layerName + erddapDimensions; - - return url; - } - - /** - * Takes the co-ordinates of a user drawn bounding box and constructs an Netcdf - * subset request URL. 
- * - * @param coords The lat/lon co-ordinates of the user drawn bounding box - * @param serviceUrl The remote URL to query - * @param layerName The coverage layername to request - * @return The NCSS subset request URL - */ - private String getNetcdfSubsetUrl(CSWGeographicBoundingBox bbox, String serviceUrl, String name, String description, String format) { - logger.debug(String.format("serviceUrl='%1$s' bbox='%2$s' layerName='%3$s' layerDescription='%4$s'", serviceUrl, bbox, name, description)); - - // convert bbox co-ordinates to an netcdfsubsetservice dimension string - String netcdfsubsetserviceDimensions = "&spatial=bb" + - "&north="+ bbox.getNorthBoundLatitude() + - "&south=" + bbox.getSouthBoundLatitude() + - "&west=" + bbox.getWestBoundLongitude() + - "&east="+ bbox.getEastBoundLongitude(); - String otherParams = "";//"&temporal=all&time_start=&time_end=&horizStride="; - - String url = serviceUrl + "?var=" + name + netcdfsubsetserviceDimensions + otherParams; - - return url; - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/JobListController.java b/src/main/java/org/auscope/portal/server/web/controllers/JobListController.java deleted file mode 100644 index d32da62e5..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/JobListController.java +++ /dev/null @@ -1,1281 +0,0 @@ -/* - * This file is part of the AuScope Virtual Rock Lab (VRL) project. - * Copyright (c) 2009 ESSCC, The University of Queensland - * - * Licensed under the terms of the GNU Lesser General Public License. - */ -package org.auscope.portal.server.web.controllers; - -import java.io.BufferedInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import java.util.zip.ZipOutputStream; -import java.nio.charset.StandardCharsets; - -import jakarta.servlet.ServletOutputStream; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; - -import org.apache.http.HttpStatus; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudDirectoryInformation; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.services.cloud.FileStagingService; -import org.auscope.portal.core.services.cloud.monitor.JobStatusException; -import org.auscope.portal.core.services.cloud.monitor.JobStatusMonitor; -import org.auscope.portal.core.util.FileIOUtil; -import org.auscope.portal.core.util.TextUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.vegl.VGLJobAuditLog; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.web.security.PortalUser; -import 
org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.CloudSubmissionService; -import org.auscope.portal.server.web.service.VGLJobAuditLogService; -import org.json.JSONArray; -import org.json.JSONObject; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.stereotype.Controller; -import org.springframework.ui.ModelMap; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.servlet.ModelAndView; - - -/** - * Controller for the job list view. - * - * @author Cihan Altinay - * @author Abdi Jama - * @author Josh Vote - * @author Richard Goh - */ -@Controller -public class JobListController extends BaseCloudController { - - /** The name of the log file that the job will use*/ - public static final String VL_LOG_FILE = "vl.sh.log"; - - /** The name of the termination file that the job will use*/ - public static final String VL_TERMINATION_FILE = "vl.end"; - - /** Logger for this class */ - private final Log logger = LogFactory.getLog(getClass()); - - private FileStagingService fileStagingService; - private VGLJobStatusAndLogReader jobStatusLogReader; - private JobStatusMonitor jobStatusMonitor; - private CloudSubmissionService cloudSubmissionService; - private VGLJobAuditLogService jobAuditLogService; - private PortalUserService userService; - - private String adminEmail=null; - - - /** - * @return the adminEmail - */ - public String getAdminEmail() { - return adminEmail; - } - - /** - * @param adminEmail the adminEmail to set - */ - public void setAdminEmail(String adminEmail) { - this.adminEmail = adminEmail; - } - - @Autowired - public JobListController(VEGLJobManager jobManager, CloudStorageService[] cloudStorageServices, - FileStagingService fileStagingService, CloudComputeService[] cloudComputeServices, - PortalUserService userService, VGLJobStatusAndLogReader jobStatusLogReader, - JobStatusMonitor jobStatusMonitor, - @Value("${cloud.vm.sh}") String vmSh, @Value("${cloud.vm-shutdown.sh}") String vmShutdownSh, - @Value("${portalAdminEmail}") String adminEmail, - CloudSubmissionService cloudSubmissionService, - VGLJobAuditLogService jobAuditLogService) { - super(cloudStorageServices, cloudComputeServices, jobManager,vmSh,vmShutdownSh); - this.jobManager = jobManager; - this.fileStagingService = fileStagingService; - this.jobStatusLogReader = jobStatusLogReader; - this.jobStatusMonitor = jobStatusMonitor; - this.adminEmail=adminEmail; - this.userService = userService; - this.cloudSubmissionService = cloudSubmissionService; - this.jobAuditLogService = jobAuditLogService; - } - -// /** -// * Returns a JSON object containing a list of the current user's series. -// * -// * @param request The servlet request -// * @param response The servlet response -// * -// * @return A JSON object with a series attribute which is an array of -// * VEGLSeries objects. 
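The JobListController constructor above pulls the VM bootstrap script URLs and the admin email from configuration through @Value placeholders on constructor parameters. A minimal sketch of that wiring pattern; the class name is hypothetical, while the property keys are the ones used above.

```java
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class ConfigInjectionSketch {
    private final String vmShUrl;
    private final String adminEmail;

    // Spring resolves the ${...} placeholders from application.yaml / the environment at wiring time.
    @Autowired
    public ConfigInjectionSketch(@Value("${cloud.vm.sh}") String vmShUrl,
                                 @Value("${portalAdminEmail}") String adminEmail) {
        this.vmShUrl = vmShUrl;
        this.adminEmail = adminEmail;
    }

    public String getAdminEmail() {
        return adminEmail;
    }
}
```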
-// */ -// @RequestMapping("/secure/mySeries.do") -// public ModelAndView mySeries(HttpServletRequest request, -// HttpServletResponse response, -// @AuthenticationPrincipal ANVGLUser user) { -// -// if (user == null || user.getEmail() == null) { -// logger.warn("No email attached to session"); -// return generateJSONResponseMAV(false, null, "No email attached to session"); -// } -// List series = jobManager.querySeries(user.getEmail(), null, null); -// -// logger.debug("Returning " + series); -// return generateJSONResponseMAV(true, series, ""); -// } - - /** - * Delete the job given by its reference. - * - * @param request The servlet request including a jobId parameter - * @param response The servlet response - * - * @return A JSON object with a success attribute and an error attribute - * in case the job was not found or can not be deleted. - */ - @RequestMapping("/secure/deleteJob.do") - public ModelAndView deleteJob(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - logger.info("Deleting job with ID " + jobId); - - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The requested job was not found."); - } - - String oldJobStatus = job.getStatus(); - - //Always cleanup our compute resources (if there are any) - terminateInstance(job, false); - - job.setStatus(JobBuilderController.STATUS_DELETED); - jobManager.saveJob(job); - jobManager.createJobAuditTrail(oldJobStatus, job, "Job deleted."); - - // Failure here is NOT fatal - it will just result in some - // residual files in staging directory and S3 cloud storage. - cleanupDeletedJob(job); - - return generateJSONResponseMAV (true, null, ""); - } - - /** - * delete all jobs of given series (and the series itself) - * - * @param request The servlet request including a seriesId parameter - * @param response The servlet response - * - * @return A JSON object with a success attribute and an error attribute - * in case the series was not found in the job manager. - * @throws PortalServiceException - */ - @RequestMapping("/secure/deleteSeriesJobs.do") - public ModelAndView deleteSeriesJobs(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("seriesId") Integer seriesId) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - VEGLSeries series = attemptGetSeries(seriesId, user); - if (series == null) { - return generateJSONResponseMAV(false); - } - - if (!series.getUser().equals(user.getEmail())) { - return generateJSONResponseMAV(false); - } - - List jobs = jobManager.getSeriesJobs(seriesId.intValue(), user); - if (jobs == null) { - logger.warn(String.format("Unable to lookup jobs for series id '%1$s'", seriesId)); - return generateJSONResponseMAV(false, null, "Unable to lookup jobs of series."); - } - - logger.info("Deleting jobs of series " + seriesId); - for (VEGLJob job : jobs) { - logger.debug(String.format("Deleting job %1$s",job)); - String oldJobStatus = job.getStatus(); - job.setStatus(JobBuilderController.STATUS_DELETED); - jobManager.saveJob(job); - jobManager.createJobAuditTrail(oldJobStatus, job, "Job deleted."); - // Failure here is NOT fatal - it will just result in some - // residual files in staging directory and S3 cloud storage. 
- cleanupDeletedJob(job); - } - - logger.info("Deleting series "+seriesId); - jobManager.deleteSeries(series); - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Cleans up all the files for a deleted job from staging directory - * and S3 cloud storage. Failure in cleaning up will not be propagated - * back to the calling method. - * @param job the job to be deleted. - */ - private void cleanupDeletedJob(VEGLJob job) { - try { - // Remove files from staging directory - fileStagingService.deleteStageInDirectory(job); - // Remove files from S3 cloud storage if the job - // hasn't been registered in GeoNetwork - if (StringUtils.isEmpty(job.getRegisteredUrl())) { - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. Cloud files (if any) will not be removed", job.getStorageServiceId(), job.getId())); - } else { - cloudStorageService.deleteJobFiles(job); - } - } - } catch (Exception ex) { - logger.warn("Error cleaning up deleted job.", ex); - } - } - - /** - * Kills or cancels the job given by its reference. - * - * @param request The servlet request including a jobId parameter - * @param response The servlet response - * - * @return A JSON object with a success attribute and an error attribute - * in case the job was not found in the job manager. - */ - @RequestMapping("/secure/killJob.do") - public ModelAndView killJob(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - logger.info("Cancelling job with ID "+jobId); - - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup job to kill."); - } - - try { - // we need to inform the user that the job cancelling is aborted - // because the job has already been processed. - if (job.getStatus().equals(JobBuilderController.STATUS_DONE)) { - return generateJSONResponseMAV(false, null, "Cancelling of job aborted as it has already been processed."); - } - - // terminate the EMI instance - terminateInstance(job, true); - } catch (Exception e) { - logger.error("Failed to cancel the job.", e); - String admin = getAdminEmail(); - if(TextUtil.isNullOrEmpty(admin)) { - admin = "the portal admin"; - } - String errorCorrection = "Please try again in a few minutes or report it to "+admin+"."; - - return generateJSONResponseMAV(false, null, "There was a problem cancelling your job.", - errorCorrection); - } - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Terminates the instance of an EMI that was launched by a job. 
- * - * @param request The HttpServletRequest - * @param job The job linked the to instance that is to be terminated - */ - private void terminateInstance(VEGLJob job, boolean includeAuditTrail) { - String oldJobStatus = job.getStatus(); - if (oldJobStatus.equals(JobBuilderController.STATUS_DONE) || - oldJobStatus.equals(JobBuilderController.STATUS_UNSUBMITTED)) { - logger.debug("Skipping finished or unsubmitted job "+job.getId()); - }else if(oldJobStatus.equals(JobBuilderController.STATUS_INQUEUE)){ - cloudSubmissionService.dequeueSubmission(job, getComputeService(job)); - - if (includeAuditTrail) { - job.setStatus(JobBuilderController.STATUS_UNSUBMITTED); - jobManager.saveJob(job); - jobManager.createJobAuditTrail(oldJobStatus, job, "Job cancelled by user."); - } - }else { - try { - // We allow the job to be cancelled and re-submitted regardless - // of its termination status. - if (includeAuditTrail) { - job.setStatus(JobBuilderController.STATUS_UNSUBMITTED); - jobManager.saveJob(job); - jobManager.createJobAuditTrail(oldJobStatus, job, "Job cancelled by user."); - } - CloudComputeService cloudComputeService = getComputeService(job); - if (cloudComputeService == null) { - logger.error(String.format("No cloud compute service with id '%1$s' for job '%2$s'. Cloud VM cannot be terminated", job.getComputeServiceId(), job.getId())); - } else { - cloudComputeService.terminateJob(job); - } - } catch (Exception e) { - logger.warn("Failed to terminate instance with id: " + job.getComputeInstanceId(), e); - } - } - } - - /** - * Kills all jobs of given series. - * - * @param request The servlet request including a seriesId parameter - * @param response The servlet response - * - * @return A JSON object with a success attribute and an error attribute - * in case the series was not found in the job manager. 
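terminateInstance above branches on the job's previous status: finished or unsubmitted jobs are skipped, queued jobs are dequeued, and everything else has its VM terminated, with failures logged rather than propagated. A simplified standalone sketch of that decision ladder; the status strings and the Action interface are stand-ins, and the audit-trail updates are omitted.

```java
public class TerminateDecisionSketch {
    // Hypothetical stand-ins for JobBuilderController's status constants.
    static final String DONE = "Done", UNSUBMITTED = "Unsubmitted", IN_QUEUE = "In Queue";

    interface Action { void run() throws Exception; }

    /** Skip finished/unsubmitted jobs, dequeue queued ones, otherwise terminate the VM; failures are only logged. */
    static void terminate(String status, Action dequeue, Action terminateVm) {
        if (DONE.equals(status) || UNSUBMITTED.equals(status)) {
            System.out.println("Nothing to clean up for status " + status);
        } else if (IN_QUEUE.equals(status)) {
            try { dequeue.run(); } catch (Exception e) { System.out.println("Dequeue failed: " + e); }
        } else {
            // Termination failures are swallowed so the job record can still be updated afterwards.
            try { terminateVm.run(); } catch (Exception e) { System.out.println("Terminate failed: " + e); }
        }
    }

    public static void main(String[] args) {
        terminate("Provisioning", () -> System.out.println("dequeue"), () -> System.out.println("terminate VM"));
    }
}
```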
- * @throws PortalServiceException - */ - @RequestMapping("/secure/killSeriesJobs.do") - public ModelAndView killSeriesJobs(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("seriesId") Integer seriesId) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - VEGLSeries series = attemptGetSeries(seriesId, user); - if (series == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup series."); - } - - List jobs = jobManager.getSeriesJobs(seriesId.intValue(), user); - if (jobs == null) { - logger.warn(String.format("Unable to lookup jobs for series id '%1$s'", seriesId)); - return generateJSONResponseMAV(false, null, "Unable to lookup jobs of series."); - } - - //Iterate our jobs, terminating as we go (abort iteration on failure) - for (VEGLJob job : jobs) { - //terminate the EMI instance - try { - logger.info("Cancelling job with ID "+ job.getId()); - - if (job.getStatus().equals(JobBuilderController.STATUS_DONE)) { - logger.info("Cancelling of job aborted as it has already been processed."); - continue; - } - - // terminate the EMI instance - terminateInstance(job, true); - } catch (Exception e) { - logger.error("Failed to cancel one of the jobs in a given series.", e); - return generateJSONResponseMAV(false, null, "There was a problem cancelling one of your jobs in selected series.", - "Please try again in a few minutes or report it to "+getAdminEmail()+"."); - } - } - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Returns a JSON object containing the latest copy of metadata for a given job's file - * - * @param jobId - * @return - */ - @RequestMapping("/secure/getCloudFileMetadata.do") - public ModelAndView getCloudFileMetadata(@RequestParam("jobId") Integer jobId, - @RequestParam("fileName") String fileName) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The requested job was not found."); - } - - CloudFileInformation fileDetails = null; - try { - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. Cloud files cannot be requested", job.getStorageServiceId(), job.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } else { - fileDetails = cloudStorageService.getJobFileMetadata(job, fileName); - } - } catch (Exception e) { - logger.warn("Error fetching job file metadata.", e); - return generateJSONResponseMAV(false, null, "Error fetching file metadata"); - } - - return generateJSONResponseMAV(true, Arrays.asList(fileDetails), ""); - } - - /** - * Returns a JSON object containing an array of files belonging to a - * given job AND the associated download objects . - * - * @param request The servlet request including a jobId parameter - * @param response The servlet response - * - * @return A JSON object with a files attribute which is an array of - * FileInformation objects. If the job was not found in the job - * manager the JSON object will contain an error attribute - * indicating the error. 
- */ - @RequestMapping("/secure/jobCloudFiles.do") - public ModelAndView jobCloudFiles(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The requested job was not found."); - } - - CloudFileInformation[] fileDetails = null; - try { - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - return generateJSONResponseMAV(true, new CloudFileInformation[0], "No cloud storage service found for job"); - } else { - fileDetails = cloudStorageService.listJobFiles(job); - } - } catch (Exception e) { - logger.warn("Error fetching output directory information."+e.getMessage()); - logger.debug("Exception details:",e); - return generateJSONResponseMAV(false, null, "Error fetching output directory information"); - } - - return generateJSONResponseMAV(true, fileDetails, ""); - } - - /** - * - * @param request - * @param response - * @param jobId - * @return - */ - @RequestMapping("/secure/jobCloudDirectoriesAndFiles.do") - public ModelAndView jobCloudDirectoriesAndFiles(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The requested job was not found."); - } - - CloudDirectoryInformation directoryDetails = null; - try { - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - return generateJSONResponseMAV(true, new CloudDirectoryInformation("", null), "No cloud storage service found for job"); - } else { - directoryDetails = cloudStorageService.listJobDirectoriesAndFiles(job, new CloudDirectoryInformation("", null)); - } - } catch (Exception e) { - logger.warn("Error fetching output directory information."+e.getMessage()); - logger.debug("Exception details:",e); - return generateJSONResponseMAV(false, null, "Error fetching output directory information"); - } - - return generateJSONResponseMAV(true, directoryDetails, ""); - } - - /** - * Sends the contents of a job file to the client. - * - * @param request The servlet request including a jobId parameter and a - * filename parameter - * @param response The servlet response receiving the data - * - * @return null on success or the joblist view with an error parameter on - * failure. - */ - @RequestMapping("/secure/downloadFile.do") - public ModelAndView downloadFile(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId, - @RequestParam("filename") String fileName, - @RequestParam("key") String key) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup job object."); - } - - logger.debug("Download " + key); - - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. 
Cloud file cannot be downloaded", job.getStorageServiceId(), job.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } - - response.setContentType("application/octet-stream"); - response.setHeader("Content-Disposition", "attachment; filename=\""+fileName+"\""); - - //Get our Input Stream - try (InputStream is = cloudStorageService.getJobFile(job, key)) { - try (OutputStream out = response.getOutputStream()) { - int n; - byte[] buffer = new byte[1024]; - while ((n = is.read(buffer)) != -1) { - out.write(buffer, 0, n); - } - out.flush(); - } catch (Exception e) { - logger.warn("Error whilst writing to output stream", e); - } - // The output is raw data down the output stream, just return null - return null; - } catch (Exception ex) { - logger.warn(String.format("Unable to access '%1$s' from the cloud", key), ex); - return generateJSONResponseMAV(false, null, "Unable to access file from the cloud"); - } - } - - /** - * Sends the contents of one or more job files as a ZIP archive to the - * client. - * - * @param request The servlet request including a jobId parameter and a - * files parameter with the filenames separated by comma - * @param response The servlet response receiving the data - * - * @return null on success or the joblist view with an error parameter on - * failure. - */ - @RequestMapping("/secure/downloadAsZip.do") - public ModelAndView downloadAsZip(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId, - @RequestParam("files") String filesParam) { - PortalUser user = userService.getLoggedInUser(); - //Lookup our job and check input files - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup job object."); - } - - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. 
Cloud file cannot be downloaded as zip", job.getStorageServiceId(), job.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } - - logger.debug("filesParam: " + filesParam); - if (filesParam == null || filesParam.isEmpty()) { - return generateJSONResponseMAV(false, null, "No files have been selected."); - } - String[] fileKeys = filesParam.split(","); - logger.debug("Archiving " + fileKeys.length + " file(s) of job " + jobId); - - //Create a filename that is semi-unique to the job (and slightly human readable) - SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); - String downloadFileName = String.format("jobfiles_%1$s_%2$s.zip", job.getName(), sdf.format(job.getSubmitDate())); - downloadFileName = downloadFileName.replaceAll("[^0-9a-zA-Z_.]", "_"); - - - //Start writing our data to a zip archive (which is being streamed to user) - try { - response.setContentType("application/zip"); - response.setHeader("Content-Disposition", String.format("attachment; filename=\"%1$s\"", downloadFileName)); - - boolean readOneOrMoreFiles = false; - ZipOutputStream zout = new ZipOutputStream( - response.getOutputStream()); - for (String fileKey : fileKeys) { - try (InputStream is = cloudStorageService.getJobFile(job, fileKey)) { - byte[] buffer = new byte[16384]; - int count = 0; - zout.putNextEntry(new ZipEntry(fileKey)); - while ((count = is.read(buffer)) != -1) { - zout.write(buffer, 0, count); - } - zout.closeEntry(); - readOneOrMoreFiles = true; - } - } - - if (readOneOrMoreFiles) { - zout.finish(); - zout.flush(); - zout.close(); - return null; - } else { - zout.close(); - logger.warn("Could not access the files!"); - } - - } catch (IOException e) { - logger.warn("Could not create ZIP file", e); - } catch (Exception e) { - logger.warn("Error getting cloudObject data", e); - } - - return null; - } - - /** - * Returns a JSON object containing an array of series that match the query - * parameters. - * - * @param request The servlet request with query parameters - * @param response The servlet response - * - * @return A JSON object with a series attribute which is an array of - * VEGLSeries objects matching the criteria. - */ - @RequestMapping("/secure/querySeries.do") - public ModelAndView querySeries(HttpServletRequest request, - HttpServletResponse response, - @RequestParam(required=false, value="qSeriesName") String qName, - @RequestParam(required=false, value="qSeriesDesc") String qDesc) { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - - //User can only query his/her own job series - if (StringUtils.isEmpty(qName) && StringUtils.isEmpty(qDesc)) { - logger.debug("No query parameters provided. Will return "+user+"'s series."); - } - - logger.debug("qUser="+user.getEmail()+", qName="+qName+", qDesc="+qDesc); - List series = jobManager.querySeries(user.getEmail(), qName, qDesc); - - logger.debug("Returning list of "+series.size()+" series."); - return generateJSONResponseMAV(true, series, ""); - } - - /** - * Attempts to creates a new folder for the specified user. - * We are resusing existing code for series in place as folder - * The series object will be returned in a JSON response on success. 
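downloadAsZip above streams each selected cloud file into its own entry of a ZipOutputStream wrapped around the servlet response. A runnable sketch of the same entry-per-file copy loop, using in-memory streams in place of CloudStorageService.getJobFile.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipStreamingSketch {
    /** Stream each named source into its own zip entry, closing the source but leaving the zip stream open. */
    static void writeZip(Map<String, InputStream> sources, ZipOutputStream zout) throws Exception {
        byte[] buffer = new byte[16384];
        for (Map.Entry<String, InputStream> source : sources.entrySet()) {
            try (InputStream is = source.getValue()) {
                zout.putNextEntry(new ZipEntry(source.getKey()));
                int count;
                while ((count = is.read(buffer)) != -1) {
                    zout.write(buffer, 0, count);
                }
                zout.closeEntry();
            }
        }
        zout.finish();
    }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ZipOutputStream zout = new ZipOutputStream(baos)) {
            // In-memory stand-ins for the cloud file streams.
            writeZip(Map.of(
                    "vl.sh.log", new ByteArrayInputStream("log contents".getBytes(StandardCharsets.UTF_8)),
                    "output.txt", new ByteArrayInputStream("results".getBytes(StandardCharsets.UTF_8))), zout);
        }
        System.out.println("Zip size: " + baos.size() + " bytes");
    }
}
```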
- * - * @param seriesName - * @param seriesDescription - * @return - */ - @RequestMapping("/secure/createFolder.do") - public ModelAndView createFolder(HttpServletRequest request, - @RequestParam("seriesName") String seriesName, - @RequestParam("seriesDescription") String seriesDescription) { - PortalUser user = userService.getLoggedInUser(); - VEGLSeries series = new VEGLSeries(); - series.setUser(user.getEmail()); - series.setName(seriesName); - series.setDescription(seriesDescription); - - try { - jobManager.saveSeries(series); - } catch (Exception ex) { - logger.error("failure saving series", ex); - return generateJSONResponseMAV(false, null, "Failure saving series"); - } - return generateJSONResponseMAV(true); - - } - - /** - * Returns a JSON object containing an array of jobs for the given series. - * - * @param request The servlet request including a seriesId parameter - * @param response The servlet response - * - * @return A JSON object with a jobs attribute which is an array of - * VEGLJob objects. - * @throws PortalServiceException - */ - @RequestMapping("/secure/listJobs.do") - public ModelAndView listJobs(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("seriesId") Integer seriesId, - @RequestParam(required=false, value="forceStatusRefresh", defaultValue="false") boolean forceStatusRefresh) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - VEGLSeries series = attemptGetSeries(seriesId, user); - if (series == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup job series."); - } - - List seriesJobs = jobManager.getSeriesJobs(seriesId.intValue(), user); - if (seriesJobs == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup jobs for the specified series."); - } - -// for (VEGLJob veglJob : seriesJobs) { -// veglJob.setProperty(CloudJob.PROPERTY_STS_ARN, user.getArnExecution()); -// veglJob.setProperty(CloudJob.PROPERTY_CLIENT_SECRET, user.getAwsSecret()); -// veglJob.setProperty(CloudJob.PROPERTY_S3_ROLE, user.getArnStorage()); -// } -// - if (forceStatusRefresh) { - try { - jobStatusMonitor.statusUpdate(seriesJobs); - } catch (JobStatusException e) { - log.info("There was an error updating one or more jobs: " + e.getMessage()); - log.debug("Exception(s): ", e); - } - } - - return generateJSONResponseMAV(true, seriesJobs, ""); - } - - /** - * Sets a user's job series ID to a new ID (which can be null indicating default job) - * - * This will fail if user is not the owner of the job or the new series. 
- * - * @param request - * @param jobId - * @param seriesId - * @param user - * @return - */ - @RequestMapping("/secure/setJobFolder.do") - public ModelAndView setJobFolder(HttpServletRequest request, - @RequestParam("jobIds") Integer[] jobIds, - @RequestParam(required=false, value="seriesId") Integer seriesId) { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - - for(Integer jobId : jobIds) { - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false); - } - - //We allow a null series ID - if (seriesId != null) { - VEGLSeries series = jobManager.getSeriesById(seriesId, user.getEmail()); - if (!series.getUser().equals(user.getEmail())) { - return generateJSONResponseMAV(false); - } - } - - job.setSeriesId(seriesId); - jobManager.saveJob(job); - } - return generateJSONResponseMAV(true); - } - - /** - * Returns a JSON array of jobStatus and jobId tuples - * @throws PortalServiceException - * - */ - @RequestMapping("/secure/jobsStatuses.do") - public ModelAndView jobStatuses(HttpServletRequest request, - HttpServletResponse response, - @RequestParam(required=false, value="forceStatusRefresh", defaultValue="false") boolean forceStatusRefresh) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - - List userJobs = jobManager.getUserJobs(user); - if (userJobs == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup jobs."); - } - - if (forceStatusRefresh) { - try { - jobStatusMonitor.statusUpdate(userJobs); - } catch (JobStatusException e) { - log.info("There was an error updating one or more jobs: " + e.getMessage()); - log.debug("Exception(s): ", e); - } - } - - List tuples = new ArrayList<>(userJobs.size()); - for (VEGLJob job : userJobs) { - ModelMap tuple = new ModelMap(); - tuple.put("jobId", job.getId()); - tuple.put("status", job.getStatus()); - tuples.add(tuple); - } - - return generateJSONResponseMAV(true, tuples, ""); - } - - /** - * Returns a JSON object containing an tree of all jobs, grouped by series. - * Also returns an array of job objects - * - * @param request The servlet request including a seriesId parameter - * @param response The servlet response - * - * @return A JSON object with a jobs attribute which is an array of - * VEGLJob objects. 
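The treeJobs handler below attaches each job to its series node and falls back to the root node when the seriesId is null or unknown. A minimal sketch of that grouping with plain maps; the record types are hypothetical stand-ins for VEGLSeries and VEGLJob.

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class JobTreeSketch {
    record Series(Integer id, String name) {}
    record Job(Integer id, String name, Integer seriesId) {}

    /** Group jobs under their series; jobs with an unknown or null seriesId fall back to the root list. */
    static Map<String, List<String>> buildTree(List<Series> series, List<Job> jobs) {
        Map<String, List<String>> tree = new HashMap<>();
        Map<Integer, String> seriesNames = new HashMap<>();
        tree.put("(root)", new ArrayList<>());
        for (Series s : series) {
            seriesNames.put(s.id(), s.name());
            tree.put(s.name(), new ArrayList<>());
        }
        for (Job j : jobs) {
            String parent = seriesNames.getOrDefault(j.seriesId(), "(root)");
            tree.get(parent).add(j.name());
        }
        return tree;
    }

    public static void main(String[] args) {
        System.out.println(buildTree(
                List.of(new Series(1, "Gravity inversions")),
                List.of(new Job(10, "run-1", 1), new Job(11, "adhoc", null))));
    }
}
```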
- * @throws PortalServiceException - */ - @SuppressWarnings("unchecked") - @RequestMapping("/secure/treeJobs.do") - public ModelAndView treeJobs(HttpServletRequest request, - HttpServletResponse response, - @RequestParam(required=false, value="forceStatusRefresh", defaultValue="false") boolean forceStatusRefresh) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - - List userSeries = jobManager.querySeries(user.getEmail(), null, null); - List userJobs = jobManager.getUserJobs(user); - if (userSeries == null || userJobs == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup jobs."); - } - - if (forceStatusRefresh) { - try { - jobStatusMonitor.statusUpdate(userJobs); - } catch (JobStatusException e) { - log.info("There was an error updating one or more jobs: " + e.getMessage()); - log.debug("Exception(s): ", e); - } - } - - //Now we organise into a tree structure - ModelMap rootNode = new ModelMap(); - rootNode.put("name", user.getEmail()); - rootNode.put("expanded", true); - rootNode.put("expandable", true); - rootNode.put("leaf", false); - rootNode.put("root", true); - rootNode.put("seriesId", null); - rootNode.put("children", new ArrayList()); - - Map nodeMap = new HashMap<>(); - for (VEGLSeries series : userSeries) { - ModelMap node = new ModelMap(); - node.put("leaf", false); - node.put("expanded", false); - node.put("expandable", true); - node.put("name", series.getName()); - node.put("seriesId", series.getId()); - node.put("children", new ArrayList()); - - nodeMap.put(series.getId(), node); - ((ArrayList) rootNode.get("children")).add(node); - } - - for (VEGLJob job : userJobs) { - ModelMap nodeParent = nodeMap.get(job.getSeriesId()); - if (nodeParent == null) { - nodeParent = rootNode; - } - - ModelMap node = new ModelMap(); - node.put("leaf", true); - node.put("name", job.getName()); - node.put("id", job.getId()); - node.put("submitDate", job.getSubmitDate()); - node.put("status", job.getStatus()); - node.put("seriesId", job.getSeriesId()); - - ((ArrayList) nodeParent.get("children")).add(node); - } - - ModelMap resultObj = new ModelMap(); - resultObj.put("nodes", rootNode); - resultObj.put("jobs", userJobs); - - return generateJSONResponseMAV(true, resultObj, ""); - } - - /** - * Tests whether the specified cloud file appears in a list of fileNames - * - * If fileNames is null, true will be returned - * @param files - * @param fileName - * @return - */ - private static boolean cloudFileIncluded(String[] fileNames, CloudFileInformation cloudFile) { - if (fileNames == null) { - return false; - } - - for (String fileName : fileNames) { - if (cloudFile.getName().endsWith(fileName)) { - return true; - } - } - - return false; - } - - /** - * Copies job files and/or downloads from sourceJobId to targetJobId - * - * Job files will be duplicated in LOCAL staging only. 
The files duplicated can be - * controlled by a list of file names - * - * Job downloads will be copied directly (but new IDs minted) - */ - @RequestMapping("/secure/copyJobFiles.do") - public ModelAndView copyJobFiles(HttpServletRequest request, - @RequestParam("targetJobId") Integer targetJobId, - @RequestParam("sourceJobId") Integer sourceJobId, - @RequestParam(required=false, value="fileKey") String[] fileKeys, - @RequestParam(required=false, value="downloadId") Integer[] downloadIds) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob sourceJob = attemptGetJob(sourceJobId, user); - VEGLJob targetJob = attemptGetJob(targetJobId, user); - - if (sourceJob == null || targetJob == null) { - logger.error(String.format("sourceJob %1$s or targetJob %2$s inaccessible to user %3$s", sourceJobId, targetJobId, user)); - return generateJSONResponseMAV(false, null, "Unable to copy files"); - } - - CloudStorageService cloudStorageService = getStorageService(sourceJob); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. Cannot copy files", sourceJob.getStorageServiceId(), sourceJob.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } - - try { - if (fileKeys != null) { - for (String fileKey : fileKeys) { - try (InputStream is = cloudStorageService.getJobFile(sourceJob, fileKey); - OutputStream os = fileStagingService.writeFile(targetJob, fileKey)) { - IOUtils.copy(is, os); - } - } - } - - List targetDownloads = targetJob.getJobDownloads(); - if (downloadIds != null) { - for (Integer downloadId : downloadIds) { - for (VglDownload download : sourceJob.getJobDownloads()) { - if (download.getId().equals(downloadId)) { - VglDownload newDownload = (VglDownload) download.clone(); - newDownload.setParent(targetJob); - newDownload.setId(null); - targetDownloads.add(newDownload); - } - } - } - jobManager.saveJob(targetJob); - } - return generateJSONResponseMAV(true); - } catch (Exception ex) { - logger.error("Error copying files for job.", ex); - return generateJSONResponseMAV(false); - } - } - - /** - * Duplicates the job given by its reference, the new job object is returned. - * - * Job files will be duplicated in LOCAL staging only. The files duplicated can be - * controlled by a list of file names - */ - @RequestMapping("/secure/duplicateJob.do") - public ModelAndView duplicateJob(HttpServletRequest request, - HttpServletResponse response, - @RequestParam("jobId") Integer jobId, - @RequestParam(required=false, value="file") String[] files) { - logger.info("Duplicate a new job from job ID "+ jobId); - PortalUser user = userService.getLoggedInUser(); - //Lookup the job we are cloning - VEGLJob oldJob; - try { - oldJob = attemptGetJob(jobId, user); - } catch (AccessDeniedException e) { - throw e; - } - - if (oldJob == null) { - return generateJSONResponseMAV(false, null, "Unable to lookup job to duplicate."); - } - - CloudStorageService cloudStorageService = getStorageService(oldJob); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. 
Cannot duplicate", oldJob.getStorageServiceId(), oldJob.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } - - //Create a cloned job but make it 'unsubmitted' - VEGLJob newJob = oldJob.safeClone(); - newJob.setSubmitDate((Date)null); - newJob.setStatus(JobBuilderController.STATUS_UNSUBMITTED); - newJob.setRegisteredUrl(null); - newJob.setComputeInstanceId(null); - - //Attempt to save the new job to the DB - try { - jobManager.saveJob(newJob); - //This needs to be set AFTER we first save the job (the ID will form part of the key) - newJob.setStorageBaseKey(cloudStorageService.generateBaseKey(newJob)); - jobManager.saveJob(newJob); - } catch (Exception ex) { - log.error("Unable to save job to database: " + ex.getMessage(), ex); - return generateJSONResponseMAV(false, null, "Unable to save new job."); - } - - try { - //Lets setup a staging area for the input files - fileStagingService.generateStageInDirectory(newJob); - //Write every file to the local staging area - CloudFileInformation[] cloudFiles = cloudStorageService.listJobFiles(oldJob); - for (CloudFileInformation cloudFile : cloudFiles) { - if (cloudFileIncluded(files, cloudFile)) { - try (InputStream is = cloudStorageService.getJobFile(oldJob, cloudFile.getName()); - OutputStream os = fileStagingService.writeFile(newJob, cloudFile.getName())) { - - FileIOUtil.writeInputToOutputStream(is, os, 1024 * 1024, false); - } - } - } - } catch (Exception ex) { - log.error("Unable to duplicate input files: " + ex.getMessage(), ex); - //Tidy up after ourselves - jobManager.deleteJob(newJob); - // Tidy the stage in area (we don't need it any more - all files are replicated in the cloud) - // Failure here is NOT fatal - it will just result in some residual files - fileStagingService.deleteStageInDirectory(newJob); - return generateJSONResponseMAV(false, null, "Unable to save new job."); - } - - jobManager.createJobAuditTrail(null, newJob, "Job duplicated."); - return generateJSONResponseMAV(true, Arrays.asList(newJob), ""); - } - - /** - * Gets a pre parsed version of the internal logs. The resulting object will - * contain the logs sectioned into 'named sections' eg: Section for python code, section for environment etc - * - * Will always contain a single section called "Full" containing the unsectioned original log - * - * @param jobId - * @return - */ - @RequestMapping("/secure/getSectionedLogs.do") - public ModelAndView getSectionedLogs(HttpServletRequest request, - @RequestParam("jobId") Integer jobId, - @RequestParam(value="file", required=false) String file) { - PortalUser user = userService.getLoggedInUser(); - //Lookup the job whose logs we are accessing - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The specified job does not exist."); - } - - ModelMap namedSections = null; - try { - namedSections = jobStatusLogReader.getSectionedLogs(job, file == null ? VL_LOG_FILE : file); - } catch (PortalServiceException ex) { - return generateJSONResponseMAV(false, null, ex.getMessage()); - } - - return generateJSONResponseMAV(true, Arrays.asList(namedSections), ""); - } - - /** - * Gets a raw dump of the instance logs (as reported by the cloud) for a particular job. This request - * will fail if the instance has been terminated. 
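getPlaintextPreview further down caps the requested preview size and reads at most that many characters from the cloud file. A runnable sketch of such a bounded read; unlike the handler, it loops until the buffer is full, since a single Reader.read call may return fewer characters than requested.

```java
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

public class BoundedPreviewSketch {
    static final int MAX_PREVIEW_CHARS = 50 * 1048576; // hard ceiling, as in the deleted handler

    /** Read at most maxChars characters from the stream and return them as a preview string. */
    static String preview(InputStream is, int maxChars) throws Exception {
        int capped = Math.min(maxChars, MAX_PREVIEW_CHARS);
        char[] buffer = new char[capped];
        try (Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8)) {
            int total = 0, read;
            while (total < capped && (read = reader.read(buffer, total, capped - total)) != -1) {
                total += read;
            }
            return new String(buffer, 0, total);
        }
    }

    public static void main(String[] args) throws Exception {
        InputStream is = new ByteArrayInputStream("a fairly long job output...".getBytes(StandardCharsets.UTF_8));
        System.out.println(preview(is, 10)); // -> "a fairly l"
    }
}
```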
- * - * - * @param jobId - * @return - */ - @RequestMapping("/secure/getRawInstanceLogs.do") - public ModelAndView getRawInstanceLogs(HttpServletRequest request, - @RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - //Lookup the job whose logs we are accessing - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The specified job does not exist."); - } - - try { - CloudComputeService service = getComputeService(job); - if (service == null) { - return generateJSONResponseMAV(false, null, "The compute service for this job no longer exists."); - } - String rawLog = service.getConsoleLog(job, 5000); - if (StringUtils.isEmpty(rawLog)) { - return generateJSONResponseMAV(false, null, "No compute logs were accessible for this job."); - } - return generateJSONResponseMAV(true, rawLog, ""); - } catch (PortalServiceException ex) { - return generateJSONResponseMAV(false, null, ex.getMessage()); - } - } - - @RequestMapping("/secure/getPlaintextPreview.do") - public ModelAndView getPlaintextPreview( - @RequestParam("jobId") Integer jobId, - @RequestParam("file") String file, - @RequestParam("maxSize") Integer maxSize) { - PortalUser user = userService.getLoggedInUser(); - final int charactersPerMegabyte = 1048576; - if (maxSize > 50 * charactersPerMegabyte) { - maxSize = 50 * charactersPerMegabyte; //Don't allow us to burn GBs on previews - } - - //Lookup the job whose logs we are accessing - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The specified job does not exist."); - } - - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. Cloud file cannot be downloaded", job.getStorageServiceId(), job.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } - - try (InputStream is = cloudStorageService.getJobFile(job, file)) { - InputStreamReader reader = new InputStreamReader(is, StandardCharsets.UTF_8); - char[] buffer = new char[maxSize]; - int charsRead = reader.read(buffer); - if (charsRead < 0) { - return generateJSONResponseMAV(false, null, "Error reading file from cloud storage."); - } - return generateJSONResponseMAV(true, new String(buffer, 0, charsRead), ""); - } catch (Exception ex) { - logger.error("Error accessing file: " + file, ex); - return generateJSONResponseMAV(false); - } - } - - @RequestMapping("/secure/getImagePreview.do") - public void getImagePreview( - HttpServletResponse response, - @RequestParam("jobId") Integer jobId, - @RequestParam("file") String file) throws Exception { - PortalUser user = userService.getLoggedInUser(); - //Lookup the job whose logs we are accessing - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - response.sendError(HttpStatus.SC_NOT_FOUND); - return; - } - - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. 
Cloud file cannot be downloaded", job.getStorageServiceId(), job.getId())); - response.sendError(HttpStatus.SC_INTERNAL_SERVER_ERROR); - return; - } - - try (InputStream is = cloudStorageService.getJobFile(job, file)) { - response.setContentType("image"); - try (ServletOutputStream os = response.getOutputStream()) { - IOUtils.copy(is, os); - } - } - } - - @RequestMapping("/secure/getJSONPreview.do") - public ModelAndView getJSONPreview( - HttpServletResponse response, - @RequestParam("jobId") Integer jobId, - @RequestParam("file") String file, - @RequestParam("arrayPosition") Integer arrayPosition, - @RequestParam("zip") Boolean zip) throws Exception { - PortalUser user = userService.getLoggedInUser(); - //Lookup the job whose logs we are accessing - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The specified job does not exist."); - } - CloudStorageService cloudStorageService = getStorageService(job); - if (cloudStorageService == null) { - logger.error(String.format("No cloud storage service with id '%1$s' for job '%2$s'. Cloud file cannot be downloaded", job.getStorageServiceId(), job.getId())); - return generateJSONResponseMAV(false, null, "No cloud storage service found for job"); - } - String jsonResult = ""; - try (InputStream is = cloudStorageService.getJobFile(job, file)) { - if (zip) { - System.out.println("Zipped file"); - ZipInputStream zis = new ZipInputStream(new BufferedInputStream(is)); - StringBuilder s = new StringBuilder(); - byte[] buffer = new byte[1024]; - int read = 0; - ZipEntry entry; - while ((entry = zis.getNextEntry()) != null) { - while ((read = zis.read(buffer, 0, 1024)) >= 0) { - s.append(new String(buffer, 0, read)); - } - zis.closeEntry(); - } - zis.close(); - jsonResult = s.toString(); - } else { - jsonResult = new String(is.readAllBytes(), StandardCharsets.UTF_8); - } - if (arrayPosition != null) { - //jsonResult = s.toString(); - JSONArray jsonArray = new JSONArray(jsonResult); - if (arrayPosition < jsonArray.length()) { - JSONObject jsonObj = jsonArray.getJSONObject(arrayPosition); - jsonResult = jsonObj.toString(); - } - } - //System.out.println("jsonResult: " + jsonResult); - } catch (Exception ex) { - logger.error("Error accessing JSON file:" + file, ex); - return generateJSONResponseMAV(false); - } - - return generateJSONResponseMAV(true, jsonResult, ""); - } - - /** - * Gets all AuditLog entries for the specified jobId. If the authenticated user doesn't own the specified job an error will be returned. - * @param jobId - * @param user - * @return - */ - @RequestMapping("/secure/getAuditLogsForJob.do") - public ModelAndView getAuditLogsForJob(@RequestParam("jobId") Integer jobId) { - PortalUser user = userService.getLoggedInUser(); - VEGLJob job = attemptGetJob(jobId, user); - if (job == null) { - return generateJSONResponseMAV(false, null, "The specified job does not exist."); - } - - try { - List auditLogs = jobAuditLogService.getAuditLogsOfJob(jobId); - return generateJSONResponseMAV(true, auditLogs, ""); - } catch (Exception ex) { - log.error("Unable to access job audit logs for " + jobId + ": " + ex.getMessage()); - log.debug("Exception:", ex); - return generateJSONResponseMAV(false, null, "Unable to access audit logs for this job due to an internal error. 
Please try refreshing the page."); - } - } - - @ExceptionHandler(AccessDeniedException.class) - @ResponseStatus(value = org.springframework.http.HttpStatus.FORBIDDEN) - public @ResponseBody String handleException(AccessDeniedException e) { - return e.getMessage(); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/PurchaseController.java b/src/main/java/org/auscope/portal/server/web/controllers/PurchaseController.java deleted file mode 100644 index 873a0a30d..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/PurchaseController.java +++ /dev/null @@ -1,582 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.awt.Dimension; -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.http.client.methods.HttpRequestBase; -import org.auscope.portal.core.server.OgcServiceProviderType; -import org.auscope.portal.core.server.controllers.BasePortalController; -import org.auscope.portal.core.services.CSWFilterService; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.csw.CSWServiceItem; -import org.auscope.portal.core.services.methodmakers.WCSMethodMaker; -import org.auscope.portal.core.services.methodmakers.filter.FilterBoundingBox; -import org.auscope.portal.core.services.responses.csw.CSWGeographicBoundingBox; -import org.auscope.portal.server.vegl.VGLDataPurchase; -import org.auscope.portal.server.vegl.VGLJobPurchase; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.SimpleWfsService; -import org.auscope.portal.server.web.service.VGLPurchaseService; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.servlet.ModelAndView; - -import com.google.gson.JsonArray; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; -import com.stripe.Stripe; -import com.stripe.exception.CardException; -import com.stripe.model.Charge; - -@Controller -public class PurchaseController extends BasePortalController { - - public static final String SESSION_DOWNLOAD_LIST = "jobDownloadList"; - - protected final Log logger = LogFactory.getLog(getClass()); - - @Value("${cloud.stripeApiKey}") - private String stripeApiKey; - - @Value("${cloud.erddapservice.url}") - private String erddapServiceUrl; - - @Autowired - private PortalUserService userService; - - @Autowired - private VGLPurchaseService purchaseService; - - private SimpleWfsService simpleWfsService; - - private CSWFilterService cswFilterService; - - // @Autowired - public PurchaseController(SimpleWfsService simpleWfsService, CSWFilterService cswFilterService) { - 
this.simpleWfsService = simpleWfsService; - this.cswFilterService = cswFilterService; - } - - /** - * handle exception - * @param ex - * @return - */ - @ResponseStatus(value = org.springframework.http.HttpStatus.BAD_REQUEST) - public @ResponseBody String handleException(IllegalArgumentException ex) { - return ex.getMessage(); - } - - - /** - * Utility for adding a single VglDownload object to the session based array of VglDownload objects. - * @param request - * @param download - */ - private void addDownloadToSession(HttpServletRequest request, VglDownload download) { - @SuppressWarnings("unchecked") - List erddapUrlList = (List) request.getSession().getAttribute(SESSION_DOWNLOAD_LIST); - if (erddapUrlList == null) { - erddapUrlList = new ArrayList<>(); - } - - logger.info("Adding download " + download.getUrl() + " to session download list"); - synchronized(erddapUrlList) { - erddapUrlList.add(download); - } - logger.info("session download list now:"); - for (VglDownload dl: erddapUrlList) { - logger.info(dl.getUrl()); - } - request.getSession().setAttribute(SESSION_DOWNLOAD_LIST, erddapUrlList); - } - - - /** - * process data payment request with Stripe - * @param request - * @param response - * @throws Exception - */ - @RequestMapping(value = "/processDataPayment.do", method = RequestMethod.POST) - public void processDataPayment(HttpServletRequest request, - HttpServletResponse response) - throws Exception { - - Stripe.apiKey = this.stripeApiKey; - logger.info("process data payment setting stripe api key to: " + this.stripeApiKey); - - String result = null; - - float amount = 0; - String tokenId = null; - String email = null; - - JsonArray dataToPurchase = null; - - // read the input data - try (InputStream in = request.getInputStream(); - final BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))) { - final char[] buffer = new char[1024]; - int numRead; - StringBuffer inputData = new StringBuffer(""); - while ((numRead = reader.read(buffer)) != -1) { - inputData.append(new String(buffer, 0, numRead)); - } - in.close(); - reader.close(); - - logger.info("got input data: " + inputData.toString()); - JsonObject postData = (JsonObject) JsonParser - .parseString(inputData.toString()); - amount = postData.getAsJsonPrimitive("amount").getAsFloat(); - tokenId = postData.getAsJsonPrimitive("tokenId").getAsString(); - email = postData.getAsJsonPrimitive("email").getAsString(); - dataToPurchase = postData.getAsJsonArray("dataToPurchase"); - logger.info("amount = " + amount + ", tokenId = " + tokenId - + ", email = " + email); - - } catch (Exception e) { - e.printStackTrace(); - JsonObject err = new JsonObject(); - err.addProperty("message", - "failed to parse payment input data: " + e.getMessage()); - result = err.toString(); - } - - // confirm that user is logged in - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - JsonObject err = new JsonObject(); - err.addProperty("message", - "Unable to process payment for anonymous user. Please log in to proceed with purchase."); - result = err.toString(); - } else { - logger.info("user: " + user.getId()); - } - - if (tokenId != null - && user != null - && dataToPurchase != null && dataToPurchase.size() > 0) { - - Map chargeParams = new HashMap(); - chargeParams.put("amount", (int) (amount * 100)); // cents! 
- chargeParams.put("currency", "aud"); - chargeParams.put("description", "Test Charge"); - chargeParams.put("source", tokenId); - chargeParams.put("receipt_email", email); - - Map metadata = new HashMap(); - metadata.put("order_id", "1234"); // TODO: put in a meaningful value - // here! - chargeParams.put("metadata", metadata); - - try { - Charge charge = Charge.create(chargeParams); - logger.info( - "Charge processed successfully, received charge object: " - + charge.toJson()); - String chargeJson = charge.toJson(); - JsonObject chargeData = (JsonObject) JsonParser.parseString(chargeJson); - JsonObject resultData = new JsonObject(); - resultData.add("charge", chargeData); - JsonArray downloadUrls = new JsonArray(); - resultData.add("downloadUrls", downloadUrls); - - // store all transaction records in the database - - for (int i = 0; i < dataToPurchase.size(); i++) { - try { - - JsonObject dataset = dataToPurchase.get(i).getAsJsonObject(); - - JsonObject cswRecord = dataset.getAsJsonObject("cswRecord"); - - JsonObject onlineResource = dataset.getAsJsonObject("onlineResource"); - String onlineResourceType = getAsString(onlineResource,"type"); - String url = getAsString(onlineResource , "url"); - String description = getAsString(onlineResource, "description"); - - JsonObject downloadOptions = dataset.getAsJsonObject("downloadOptions"); - String localPath = getAsString(downloadOptions, "localPath"); - String name = getAsString(cswRecord,"name"); - Double northBoundLatitude = getAsDouble(downloadOptions, "northBoundLatitude"); - Double southBoundLatitude =getAsDouble(downloadOptions, "southBoundLatitude"); - Double eastBoundLongitude = getAsDouble(downloadOptions, "eastBoundLongitude"); - Double westBoundLongitude = getAsDouble(downloadOptions, "westBoundLongitude"); - - logger.info("to store in the purchases table: " + cswRecord + "," + onlineResourceType + "," - + url + "," + localPath + "," + name + "," + description + "," - + northBoundLatitude + "," + southBoundLatitude + "," - + eastBoundLongitude + "," + westBoundLongitude); - - String downloadUrl = getDownloadUrl(onlineResourceType, downloadOptions, getAsString(cswRecord, "recordInfoUrl")); - - // TODO: should probably extract the timestamp from the strip result json - VGLDataPurchase vglPurchase = new VGLDataPurchase(new Date(), amount, downloadUrl, cswRecord.toString(), - onlineResourceType, url, localPath, name, description, - northBoundLatitude, southBoundLatitude, eastBoundLongitude, westBoundLongitude, chargeJson, - user); - Integer id = purchaseService.saveDataPurchase(vglPurchase); - logger.info("saved user purchase to database, purchase id is: " + id + ", download url is: " + downloadUrl); - JsonObject downloadUrlObj = new JsonObject(); - downloadUrlObj.addProperty("url", downloadUrl); - downloadUrlObj.addProperty("name", name); - downloadUrls.add(downloadUrlObj); - - // save download to session - VglDownload newDownload = new VglDownload(); - newDownload.setName(name); - newDownload.setDescription(description); - newDownload.setLocalPath(localPath); - newDownload.setUrl(downloadUrl); - newDownload.setNorthBoundLatitude(northBoundLatitude); - newDownload.setEastBoundLongitude(eastBoundLongitude); - newDownload.setSouthBoundLatitude(southBoundLatitude); - newDownload.setWestBoundLongitude(westBoundLongitude); - this.addDownloadToSession(request, newDownload); - - } catch (Exception e) { - e.printStackTrace(); - } - } - result = resultData.toString(); - - } catch (CardException e) { - // If it's a decline, CardException will be 
caught - logger.warn("Card payment failed with error: " + e.getCode() - + " (" + e.getMessage() + ")"); - JsonObject err = new JsonObject(); - err.addProperty("message", - "Stripe card payment failed: " + e.getMessage()); - result = err.toString(); - - } catch (Exception e) { - logger.warn("Exception while processing stripe payment: " - + e.getMessage()); - JsonObject err = new JsonObject(); - err.addProperty("message", - "Stripe card payment failed: " + e.getMessage()); - result = err.toString(); - } - } - - response.setContentType("application/json"); - PrintWriter writer = response.getWriter(); - writer.println(result); - writer.close(); - } - - /** - * process job payment request with Stripe - * @param request - * @param response - * @throws Exception - */ - @RequestMapping(value = "/processJobPayment.do", method = RequestMethod.POST) - public void processJobPayment(HttpServletRequest request, - HttpServletResponse response) - throws Exception { - - Stripe.apiKey = this.stripeApiKey; - logger.info("process job payment setting stripe api key to: " + this.stripeApiKey); - - String result = null; - - float amount = 0; - String tokenId = null; - String email = null; - int jobId = 0; - String jobName = null; - - // read the input data - try (InputStream in = request.getInputStream(); - final BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))) { - final char[] buffer = new char[1024]; - int numRead; - StringBuffer inputData = new StringBuffer(""); - while ((numRead = reader.read(buffer)) != -1) { - inputData.append(new String(buffer, 0, numRead)); - } - in.close(); - reader.close(); - - logger.info("got input data: " + inputData.toString()); - JsonObject postData = (JsonObject) JsonParser - .parseString(inputData.toString()); - amount = postData.getAsJsonPrimitive("amount").getAsFloat(); - tokenId = postData.getAsJsonPrimitive("tokenId").getAsString(); - email = postData.getAsJsonPrimitive("email").getAsString(); - jobId = postData.getAsJsonPrimitive("jobId").getAsInt(); - jobName = postData.getAsJsonPrimitive("jobName").getAsString(); - - logger.info("amount = " + amount + ", tokenId = " + tokenId - + ", email = " + email + ", jobId = " + jobId + ", jobName = " + jobName); - - } catch (Exception e) { - e.printStackTrace(); - JsonObject err = new JsonObject(); - err.addProperty("message", - "failed to parse payment input data: " + e.getMessage()); - result = err.toString(); - } - - // confirm that user is logged in - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - JsonObject err = new JsonObject(); - err.addProperty("message", - "Unable to process payment for anonymous user. Please log in to proceed with purchase."); - result = err.toString(); - } else { - logger.info("user: " + user.getId()); - } - - if (tokenId != null && user != null) { - - Map chargeParams = new HashMap(); - chargeParams.put("amount", (int) (amount * 100)); // cents! - chargeParams.put("currency", "aud"); - chargeParams.put("description", "Test Charge"); - chargeParams.put("source", tokenId); - chargeParams.put("receipt_email", email); - - Map metadata = new HashMap(); - metadata.put("order_id", "1234"); // TODO: put in a meaningful value - // here! 
- chargeParams.put("metadata", metadata); - - try { - Charge charge = Charge.create(chargeParams); - logger.info( - "Charge processed successfully, received charge object: " - + charge.toJson()); - String chargeJson = charge.toJson(); - JsonObject chargeData = (JsonObject)JsonParser.parseString(chargeJson); - JsonObject resultData = new JsonObject(); - resultData.add("charge", chargeData); - - // store job transaction record in the database - - // TODO: should probably extract the timestamp from the strip result json - VGLJobPurchase vglPurchase = new VGLJobPurchase(new Date(), amount, jobId, jobName, chargeJson, user); - Integer id = purchaseService.saveJobPurchase(vglPurchase); - logger.info("saved user job purchase to database, purchase id is: " + id ); - - result = resultData.toString(); - - } catch (CardException e) { - // If it's a decline, CardException will be caught - logger.warn("Card payment failed with error: " + e.getCode() - + " (" + e.getMessage() + ")"); - JsonObject err = new JsonObject(); - err.addProperty("message", - "Stripe card payment failed: " + e.getMessage()); - result = err.toString(); - - } catch (Exception e) { - logger.warn("Exception while processing stripe payment: " - + e.getMessage()); - JsonObject err = new JsonObject(); - err.addProperty("message", - "Stripe card payment failed: " + e.getMessage()); - result = err.toString(); - } - } - - response.setContentType("application/json"); - PrintWriter writer = response.getWriter(); - writer.println(result); - writer.close(); - } - - - - /** - * Retrieves all purchase made by user. - * @param user - * @return - * @throws PortalServiceException - */ - @RequestMapping("/getDataPurchases.do") - public ModelAndView getPurchases() throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - List purchases = purchaseService.getDataPurchasesByUser(user); - return generateJSONResponseMAV(true, purchases, ""); - } - - private String getAsString(JsonObject parent, String name) { - if (parent != null && parent.has(name)) { - return parent.getAsJsonPrimitive(name).getAsString(); - } - return null; - } - - private Double getAsDouble(JsonObject parent, String name) { - if (parent != null && parent.has(name)) { - return parent.getAsJsonPrimitive(name).getAsDouble(); - } - return null; - } - - private Integer getAsInt(JsonObject parent, String name) { - if (parent != null && parent.has(name)) { - return parent.getAsJsonPrimitive(name).getAsInt(); - } - return null; - } - - // returns substring of the url in the form http(s)://server:port/endpoint (port may be missing) - private String getBaseUrl(String url) { - int startIndex = 7; - if (url.startsWith("https")) { - startIndex = 8; - } - int firstSlashIndex = url.indexOf("/", startIndex); - if (firstSlashIndex != -1) { - int end = url.indexOf("/", firstSlashIndex + 1); - if (end == -1) { - end = url.length(); - } - return url.substring(0, end); - } - // if get here, then just return the url as is - return url; - } - - private OgcServiceProviderType getServiceType(String serviceUrl) { - - OgcServiceProviderType serviceType = null; - String serviceBaseUrl = getBaseUrl(serviceUrl); - log.info("service base url = " + serviceBaseUrl); - CSWServiceItem[] serviceItems = this.cswFilterService.getCSWServiceItems(); - for (CSWServiceItem item: serviceItems) { - String itemBaseUrl = getBaseUrl(item.getRecordInformationUrl()); - log.info("item base url = " + itemBaseUrl); - if (itemBaseUrl.contentEquals(serviceBaseUrl)) { - serviceType = item.getServerType(); - 
break; - } - } - return serviceType; - } - - - private String getDownloadUrl(String onlineResourceType, JsonObject downloadOptions, String cswRecordInfoUrl) { - - String name = getAsString(downloadOptions, "name"); - String url = getAsString(downloadOptions, "url"); - - log.info("downloadOptions url = " + url); - Double northBoundLatitude = getAsDouble(downloadOptions, "northBoundLatitude"); - Double southBoundLatitude = getAsDouble(downloadOptions, "southBoundLatitude"); - Double eastBoundLongitude = getAsDouble(downloadOptions, "eastBoundLongitude"); - Double westBoundLongitude = getAsDouble(downloadOptions, "westBoundLongitude"); - - switch (onlineResourceType) { - - case "WCS": { - - OgcServiceProviderType serviceType = getServiceType(cswRecordInfoUrl); - log.info("WCS service type = " + serviceType); - - if (serviceType != null && (serviceType == OgcServiceProviderType.GeoServer || serviceType == OgcServiceProviderType.PyCSW)) { - //http://localhost:8090/geoserver/wcs?service=WCS&request=GetCoverage&coverageId=tasmax_djf&format=geotiff&srsName=EPSG%3A4326&bbox=-34.68404023638139%2C150.83192110061643%2C-34.66371104796619%2C150.86144685745234%2Curn%3Aogc%3Adef%3Acrs%3AEPSG%3A4326&&version=2.0.0 - - String layerName = getAsString(downloadOptions, "layerName"); - String bboxCrs = "EPSG:4326";//getAsString(downloadOptions,"crs"); - - CSWGeographicBoundingBox bbox = new CSWGeographicBoundingBox(westBoundLongitude, eastBoundLongitude, southBoundLatitude, northBoundLatitude); - HttpRequestBase downloadUrl = null; - try { - WCSMethodMaker wcsMethodMaker = new WCSMethodMaker(); - downloadUrl = wcsMethodMaker.getCoverageMethod(url, layerName, "geotiff", bboxCrs, new Dimension(1000,1000), - null, bboxCrs, bbox, null, null); - } catch (Exception ex) { - log.warn(String.format("Exception generating service request for '%2$s' from '%1$s': %3$s", url, layerName, ex)); - ex.printStackTrace(); - return null; - } - return downloadUrl.getRequestLine().getUri(); - - } else { // assume ERDDAP??? - - // Unfortunately ERDDAP requests that extend beyond the spatial bounds of the dataset - // will fail. To workaround this, we need to crop our selection to the dataset bounds - Double dsNorthBoundLatitude = getAsDouble(downloadOptions, "dsNorthBoundLatitude"); - Double dsSouthBoundLatitude = getAsDouble(downloadOptions, "dsSouthBoundLatitude"); - Double dsEastBoundLongitude = getAsDouble(downloadOptions, "dsEastBoundLongitude"); - Double dsWestBoundLongitude = getAsDouble(downloadOptions, "dsWestBoundLongitude"); - - if (dsEastBoundLongitude != null && (dsEastBoundLongitude < eastBoundLongitude)) - eastBoundLongitude = dsEastBoundLongitude; - if (dsWestBoundLongitude != null && (dsWestBoundLongitude > westBoundLongitude)) - westBoundLongitude = dsWestBoundLongitude; - if (dsNorthBoundLatitude != null && (dsNorthBoundLatitude < northBoundLatitude)) - northBoundLatitude = dsNorthBoundLatitude; - if (dsSouthBoundLatitude != null && (dsSouthBoundLatitude > southBoundLatitude)) - southBoundLatitude = dsSouthBoundLatitude; - - String layerName = getAsString(downloadOptions, "layerName"); - String format = getAsString(downloadOptions, "format"); - - // convert bbox co-ordinates to ERDDAP an ERDDAP dimension string - String erddapDimensions = "%5B("+ southBoundLatitude +"):1:("+ northBoundLatitude - + ")%5D%5B("+ westBoundLongitude +"):1:("+ eastBoundLongitude +")%5D"; - return this.erddapServiceUrl + layerName + "." + format + "?" 
+ layerName + erddapDimensions; - } - } - - case "WFS": { - String serviceUrl = getAsString(downloadOptions, "serviceUrl"); - String featureType = getAsString(downloadOptions,"featureType"); - String srsName = getAsString(downloadOptions,"srsName"); - String outputFormat = getAsString(downloadOptions,"outputFormat"); - Integer maxFeatures = getAsInt(downloadOptions,"maxFeatures"); - String bboxCrs = getAsString(downloadOptions,"crs"); - - FilterBoundingBox bbox = FilterBoundingBox.parseFromValues(bboxCrs, northBoundLatitude, southBoundLatitude, eastBoundLongitude, westBoundLongitude); - String downloadUrl = null; - try { - downloadUrl = simpleWfsService.getFeatureRequestAsString(serviceUrl, featureType, bbox, maxFeatures, srsName, outputFormat); - } catch (Exception ex) { - log.warn(String.format("Exception generating service request for '%2$s' from '%1$s': %3$s", serviceUrl, featureType, ex)); - } - return downloadUrl; - } - case "NCSS": { - String netcdfsubsetserviceDimensions = "&spatial=bb" + - "&north="+ northBoundLatitude + - "&south=" + southBoundLatitude + - "&west=" + westBoundLongitude + - "&east="+ eastBoundLongitude; - return url + "?var=" + name + netcdfsubsetserviceDimensions; - } - default: - return url; - } - } - - -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/ScriptBuilderController.java b/src/main/java/org/auscope/portal/server/web/controllers/ScriptBuilderController.java deleted file mode 100644 index ecf70c2de..000000000 --- a/src/main/java/org/auscope/portal/server/web/controllers/ScriptBuilderController.java +++ /dev/null @@ -1,433 +0,0 @@ -/* - * This file is part of the AuScope Virtual Rock Lab (VRL) project. - * Copyright (c) 2009 ESSCC, The University of Queensland - * - * Licensed under the terms of the GNU Lesser General Public License. 
- */ -package org.auscope.portal.server.web.controllers; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.services.csw.SearchFacet; -import org.auscope.portal.core.services.csw.SearchFacet.Comparison; -import org.auscope.portal.core.util.FileIOUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.LintResult; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.auscope.portal.server.web.service.ScmEntryService; -import org.auscope.portal.server.web.service.ScriptBuilderService; -import org.auscope.portal.server.web.service.SolutionResponse; -import org.auscope.portal.server.web.service.TemplateLintService; -import org.auscope.portal.server.web.service.TemplateLintService.TemplateLanguage; -import org.auscope.portal.server.web.service.scm.Problem; -import org.auscope.portal.server.web.service.scm.Solution; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.stereotype.Controller; -import org.springframework.ui.ModelMap; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; -import org.springframework.web.servlet.ModelAndView; - -/** - * Controller for the ScriptBuilder view. - * - * @author Cihan Altinay - * @author Josh Vote - modified for usage with VEGL - * @author Richard Goh - */ -@Controller -public class ScriptBuilderController extends BaseCloudController { - - private final Log logger = LogFactory.getLog(getClass()); - - /** Handles saving scripts against a job*/ - private ScriptBuilderService sbService; - - /** Handles SCM entries. 
*/ - private ScmEntryService scmEntryService; - - /** Script checking */ - private TemplateLintService templateLintService; - - private PortalUserService userService; - - @Autowired - private NCIDetailsService nciDetailsService; - - /** - * Creates a new instance - * - * @param jobFileService - * @param jobManager - */ - @Autowired - public ScriptBuilderController(ScriptBuilderService sbService, - PortalUserService userService, - VEGLJobManager jobManager, - ScmEntryService scmEntryService, - TemplateLintService templateLintService, - CloudStorageService[] cloudStorageServices, - CloudComputeService[] cloudComputeServices, - @Value("${cloud.vm.sh}") String vmSh, - @Value("${cloud.vm-shutdown.sh}") String vmShutdownSh) { - super(cloudStorageServices, cloudComputeServices, jobManager,vmSh,vmShutdownSh); - this.sbService = sbService; - this.userService= userService; - this.scmEntryService = scmEntryService; - this.templateLintService = templateLintService; - } - - /** - * Writes provided script text to a file in the specified jobs stage in directory - * - * @param jobId - * @param sourceText - * @param solution - * @return A JSON encoded response with a success flag - */ - @RequestMapping("/secure/saveScript.do") - public ModelAndView saveScript(@RequestParam("jobId") String jobId, - @RequestParam("sourceText") String sourceText, - @RequestParam("solutions") Set solutions) { - PortalUser user = userService.getLoggedInUser(); - if (sourceText == null || sourceText.trim().isEmpty()) { - return generateJSONResponseMAV(false, null, "No source text specified"); - } - - VEGLJob job = attemptGetJob(Integer.parseInt(jobId), user); - if (job == null) { - return generateJSONResponseMAV(false); - } - - try { - sbService.saveScript(job, sourceText, user); - } catch (PortalServiceException ex) { - logger.warn("Unable to save job script for job with id " + jobId + ": " + ex.getMessage()); - logger.debug("error:", ex); - return generateJSONResponseMAV(false, null, "Unable to write script file"); - } - - // Update job with vmId for solution if we have one. - try { - scmEntryService.updateJobForSolution(job, solutions, user); - } - catch (PortalServiceException e) { - logger.warn("Failed to update job (" + jobId + ") for solutions (" + - solutions + "): " + e.getMessage()); - logger.debug("error: ", e); - return generateJSONResponseMAV(false, null, "Unable to write script file"); - } - - return generateJSONResponseMAV(true, null, ""); - } - - /** - * Gets the contents of a saved job's script file. 
- * @param jobId - * @return A JSON encoded response which contains the contents of a saved job's script file - */ - @RequestMapping("/getSavedScript.do") - public ModelAndView getSavedScript(@RequestParam("jobId") String jobId) { - logger.debug("getSavedScript with jobId: " + jobId); - PortalUser user = userService.getLoggedInUser(); - String script = null; - - VEGLJob job = attemptGetJob(Integer.parseInt(jobId), user); - if (job == null) { - return generateJSONResponseMAV(false); - } - - try { - script = sbService.loadScript(job, user); - } catch (PortalServiceException ex) { - logger.error("Unable to load saved script for job with id " + jobId, ex); - return generateJSONResponseMAV(false, null, ex.getMessage(), ex.getErrorCorrection()); - } - - return generateJSONResponseMAV(true, script, ""); - } - - /** - * Gets a JSON list of id/name pairs for every available compute service - * that has been properly configured to run with the logged in user - * - * @return - * @throws PortalServiceException - */ - @RequestMapping("/secure/getConfiguredComputeServices.do") - public ModelAndView getComputeServices() throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - List configuredServices = getConfiguredComputeServices(user, nciDetailsService); - List parsedItems = new ArrayList(); - for (CloudComputeService ccs : configuredServices) { - ModelMap mm = new ModelMap(); - mm.put("providerId", ccs.getId()); - mm.put("name", ccs.getName()); - parsedItems.add(mm); - } - ModelMap mm = new ModelMap(); - mm.put("providerId", ""); - mm.put("name", "All Providers"); - parsedItems.add(mm); - - return generateJSONResponseMAV(true, parsedItems, ""); - } - - /** - * Gets a named script template and fills in all named placeholders with the matching key/value pairs - * @param templateName Script name - * @param keys Keys to lookup (corresponds 1-1 with values) - * @param values Values to use in template for placeholders (corresponds 1-1 with keys) - * @return - */ - @RequestMapping("/getTemplatedScript.do") - public ModelAndView getTemplatedScript(@RequestParam("templateName") String templateName, - @RequestParam(value="key", required=false) String[] keys, - @RequestParam(value="value", required=false) String[] values) { - //Turn our KVP inputs into something that we can pass to our service - Map kvpMapping = new HashMap<>(); - if (keys != null && values != null) { - for (int i = 0; i < keys.length && i < values.length; i++) { - kvpMapping.put(keys[i], values[i]); - } - } - - //Load our template file into a string - String templateResource = "/org/auscope/portal/server/scriptbuilder/templates/" + templateName.replaceAll("\\.\\.", "").replaceAll("/",""); - String templateString = null; - - try (InputStream is = this.getClass().getResourceAsStream(templateResource)) { - if (is == null) { - logger.error("Unable to find template resource - " + templateResource); - return generateJSONResponseMAV(false, null, "Requested template does not exist"); - } - templateString = FileIOUtil.convertStreamtoString(is); - } catch (IOException e) { - logger.error("Unable to read template resource - " + templateResource + ":" + e.getMessage()); - logger.debug("Exception:", e); - return generateJSONResponseMAV(false, null, "Internal server error when loading template."); - } - - String finalTemplate = sbService.populateTemplate(templateString, - kvpMapping); - return generateJSONResponseMAV(true, finalTemplate, ""); - } - - /** - * Return a JSON list of problems and their solutions. 
- * @throws PortalServiceException - */ - @RequestMapping("/secure/getProblems.do") - public ModelAndView getProblems( - @RequestParam(value="field", required=false) String[] rawFields, - @RequestParam(value="value", required=false) String[] rawValues, - @RequestParam(value="type", required=false) String[] rawTypes, - @RequestParam(value="comparison", required=false) String[] rawComparisons) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - if (rawFields == null) { - rawFields = new String[0]; - } - - if (rawValues == null) { - rawValues = new String[0]; - } - - if (rawTypes == null) { - rawTypes = new String[0]; - } - - if (rawComparisons == null) { - rawComparisons = new String[0]; - } - - if (rawFields.length != rawValues.length || rawFields.length != rawTypes.length || rawFields.length != rawComparisons.length) { - throw new IllegalArgumentException("field/value/type/comparison lengths mismatch"); - } - - //Parse our raw request info into a list of search facets - List> facets = new ArrayList>(); - for (int i = 0; i < rawFields.length; i++) { - Comparison cmp = null; - switch(rawComparisons[i]) { - case "gt": - cmp = Comparison.GreaterThan; - break; - case "lt": - cmp = Comparison.LessThan; - break; - case "eq": - cmp = Comparison.Equal; - break; - default: - throw new IllegalArgumentException("Unknown comparison type: " + rawComparisons[i]); - } - - SearchFacet newFacet = null; - switch(rawTypes[i]) { - case "string": - newFacet = new SearchFacet(rawValues[i], rawFields[i], cmp); - break; - } - - facets.add(newFacet); - } - - - // Get the Solutions from the SSC - List configuredServices = getConfiguredComputeServices(user, nciDetailsService); - SolutionResponse solutions = scmEntryService.getSolutions(facets, configuredServices.toArray(new CloudComputeService[configuredServices.size()])); - - // Group solutions by the problem that they solve. - HashMap configuredProblems = new HashMap<>(); - HashMap unconfiguredProblems = new HashMap<>(); - - for (Solution solution: solutions.getConfiguredSolutions()) { - String problemId = solution.getProblem().getId(); - Problem problem = configuredProblems.get(problemId); - - if (problem == null) { - problem = solution.getProblem(); - problem.setSolutions(new ArrayList()); - configuredProblems.put(problem.getId(), problem); - } - problem.getSolutions().add(solution); - } - - for (Solution solution: solutions.getUnconfiguredSolutions()) { - String problemId = solution.getProblem().getId(); - Problem problem = unconfiguredProblems.get(problemId); - - if (problem == null) { - problem = solution.getProblem(); - problem.setSolutions(new ArrayList()); - unconfiguredProblems.put(problem.getId(), problem); - } - problem.getSolutions().add(solution); - } - - ModelMap result = new ModelMap(); - result.put("configuredProblems", configuredProblems.values()); - result.put("unconfiguredProblems", unconfiguredProblems.values()); - - // Return the result - return generateJSONResponseMAV(true, result, ""); - } - - /** - * Return the details for a solution. - * - * @param solutionId String solution id - * - */ - @RequestMapping("/getSolution.do") - public ModelAndView getSolution(String solutionId) { - Solution solution = scmEntryService.getScmSolution(solutionId); - - // Wrap the data in an array or list until the JSON response - // code is fixed. - return generateJSONResponseMAV(true, new Solution[] {solution}, ""); - } - - /** - * Return a list of solution objects for the corresponding uris. 
- * - * @param uris Collection of uris to look up - * @return List solution objects - */ - @RequestMapping("/getSolutions.do") - public ModelAndView doGetSolutions(@RequestParam("uris") Set uris) { - ArrayList solutions = new ArrayList<>(); - StringBuilder msg = new StringBuilder(); - - for (String uri: uris) { - Solution solution = scmEntryService.getScmSolution(uri); - if (solution != null) { - solutions.add(solution); - } - else { - msg.append(String.format("No solution found (%s)", uri)) - .append("; \n"); - } - } - - return generateJSONResponseMAV(true, solutions, msg.toString()); - } - - /** - * Return a list of errors/warnings about a template. - * - * Passes the template string to pylint and turns the results into a list of - * error objects. Each entry contains a severity (error, warning, etc.), - * message string and the location in the code (from, to). - * - * The template language must be one of the following values: - * - python3 - * - python2 - * - * @param template String with template code - * @param lang String identifying template language (default=python3) - * @return List lint result objects - */ - @RequestMapping("/lintTemplate.do") - public ModelAndView doLintTemplate(@RequestParam("template") String template, - @RequestParam(value="lang", - required=false) String lang) { - TemplateLanguage templateLanguage = null; - String msg = "No errors or warnings found."; - List lints = null; - - // Make sure it's a supported language - if (lang == null) { - templateLanguage = TemplateLanguage.PYTHON3; - } - else { - try { - templateLanguage = TemplateLanguage.valueOf(lang.toUpperCase()); - } - catch (IllegalArgumentException ex) { - logger.error("Invalid template language for template linting", ex); - return generateJSONResponseMAV(false, null, "Unable to check unsupported template language."); - } - } - - // Get the linter to do its thing - try { - lints = templateLintService.checkTemplate(template, templateLanguage); - if (lints != null && lints.size() > 0) { - msg = String.format("Found %d issues", lints.size()); - } - } - catch (PortalServiceException ex) { - logger.warn("Template code check failed: " + ex.getMessage(), ex); - return generateJSONResponseMAV(false, null, "Template code check failed: " + ex.getMessage()); - } - - return generateJSONResponseMAV(true, lints, msg); - } - - @ExceptionHandler(AccessDeniedException.class) - @ResponseStatus(value = org.springframework.http.HttpStatus.FORBIDDEN) - public @ResponseBody String handleException(AccessDeniedException e) { - return e.getMessage(); - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/controllers/UserController.java b/src/main/java/org/auscope/portal/server/web/controllers/UserController.java index 800c6a1dc..2413d58e5 100644 --- a/src/main/java/org/auscope/portal/server/web/controllers/UserController.java +++ b/src/main/java/org/auscope/portal/server/web/controllers/UserController.java @@ -1,34 +1,21 @@ package org.auscope.portal.server.web.controllers; import java.io.IOException; -import java.io.StringWriter; import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; - -import jakarta.servlet.http.HttpServletResponse; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.velocity.VelocityContext; -import org.apache.velocity.app.VelocityEngine; import org.auscope.portal.core.server.controllers.BasePortalController; import 
org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.NCIDetailsService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; -import org.springframework.http.HttpStatus; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.multipart.MultipartFile; import org.springframework.web.servlet.ModelAndView; /** @@ -37,35 +24,19 @@ */ @Controller public class UserController extends BasePortalController { - private static final String CLOUD_FORMATION_RESOURCE = "org/auscope/portal/server/web/controllers/vl-cloudformation.json.tpl"; - protected final Log logger = LogFactory.getLog(getClass()); @Autowired private PortalUserService userService; - @Autowired - private NCIDetailsService nciDetailsService; - - private VelocityEngine velocityEngine; - private CloudComputeService[] cloudComputeServices; - - @Autowired - private String awsAccount; - private String tacVersion; @Autowired - public UserController(VelocityEngine velocityEngine, - CloudComputeService[] cloudComputeServices, - @Value("${termsconditions.version:1}") String tacVersion) throws PortalServiceException { + public UserController(@Value("${termsconditions.version:1}") String tacVersion) throws PortalServiceException { super(); - this.velocityEngine = velocityEngine; - this.cloudComputeServices = cloudComputeServices; this.tacVersion = tacVersion; } - /** * Gets user metadata for the currently logged in user * @param user @@ -81,10 +52,7 @@ public ModelAndView getUser() { userObj.put("id", user.getId()); userObj.put("email", user.getEmail()); userObj.put("fullName", user.getFullName()); - userObj.put("arnExecution", user.getArnExecution()); - userObj.put("arnStorage", user.getArnStorage()); userObj.put("acceptedTermsConditions", user.getAcceptedTermsConditions()); - userObj.put("awsKeyName", user.getAwsKeyName()); return generateJSONResponseMAV(true, userObj, ""); } @@ -99,36 +67,19 @@ public ModelAndView getUser() { */ @RequestMapping("/secure/setUser.do") public ModelAndView setUser( - @RequestParam(required=false, value="arnExecution") String arnExecution, - @RequestParam(required=false, value="arnStorage") String arnStorage, - @RequestParam(required=false, value="acceptedTermsConditions") Integer acceptedTermsConditions, - @RequestParam(required=false, value="awsKeyName") String awsKeyName) { + @RequestParam(required=false, value="acceptedTermsConditions") Integer acceptedTermsConditions) { PortalUser user = userService.getLoggedInUser(); if (user == null) { return generateJSONResponseMAV(false); } boolean modified = false; - if (!StringUtils.isEmpty(arnExecution)) { - user.setArnExecution(arnExecution); - modified = true; - } - - if (!StringUtils.isEmpty(arnStorage)) { - user.setArnStorage(arnStorage); - modified = true; - } if (acceptedTermsConditions != null) { user.setAcceptedTermsConditions(acceptedTermsConditions); modified = true; } - if (!StringUtils.equals(user.getAwsKeyName(), awsKeyName)) { - user.setAwsKeyName(awsKeyName); - modified = true; - } - if (modified) { 
userService.saveUser(user); } @@ -156,105 +107,4 @@ public ModelAndView getTermsConditions() { } } - @RequestMapping("/secure/getCloudFormationScript.do") - public void getCloudFormationScript(HttpServletResponse response) throws IOException { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - response.sendError(HttpStatus.UNAUTHORIZED.value()); - return; - } - - Map model = new HashMap<>(); - model.put("s3Bucket", user.getS3Bucket()); - model.put("awsSecret", user.getAwsSecret()); - model.put("awsAccount", awsAccount); - VelocityContext velocityContext = new VelocityContext(model); - StringWriter stringWriter = new StringWriter(); - velocityEngine.mergeTemplate(CLOUD_FORMATION_RESOURCE, "UTF-8", velocityContext, stringWriter); - - String cloudFormationScript = stringWriter.toString(); - - response.setContentType("application/octet"); - response.setHeader("Content-Disposition", "inline; filename=vgl-cloudformation.json;"); - - try { - response.getOutputStream().write(cloudFormationScript.getBytes(StandardCharsets.UTF_8)); - } catch (IOException e) { - response.sendError(HttpStatus.INTERNAL_SERVER_ERROR.value()); - } - } - - @RequestMapping("/secure/getNCIDetails.do") - public ModelAndView getNCIDetails() throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - ModelMap detailsObj = new ModelMap(); - NCIDetails details = nciDetailsService.getByUser(user); - if(details != null) { - try { - detailsObj.put("nciUsername", details.getUsername()); - detailsObj.put("nciProject", details.getProject()); - detailsObj.put("nciKey", details.getKey()); - } catch(Exception e) { - logger.error("Unable to decrypt NCI details: " + e.getLocalizedMessage()); - } - return generateJSONResponseMAV(true, detailsObj, ""); - } - return generateJSONResponseMAV(false); - } - - @RequestMapping("/secure/setNCIDetails.do") - public ModelAndView setNCIDetails( - @RequestParam(required=false, value="nciUsername") String username, - @RequestParam(required=false, value="nciProject") String project, - @RequestParam(required=false, value="nciKey") MultipartFile key) throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - NCIDetails details = nciDetailsService.getByUser(user); - if(details == null) { - details = new NCIDetails(); - details.setUser(user); - } - boolean modified = false; - try { - if (!StringUtils.isEmpty(username) || !StringUtils.equals(details.getUsername(), username)) { - details.setUsername(username); - modified = true; - } - if (!StringUtils.isEmpty(project) || !StringUtils.equals(details.getProject(), project)) { - details.setProject(project); - modified = true; - } - if (key != null ) { - //String keyString = key.getFileItem().getString(); - String keyString = new String(key.getBytes()); - if (!StringUtils.isEmpty(keyString) || !StringUtils.equals(details.getKey(), keyString)) { - details.setKey(keyString); - modified = true; - } - } - } catch(Exception e) { - logger.error(e.getLocalizedMessage()); - } - - if (modified) { - nciDetailsService.saveNCIDetails(details); - } - return generateJSONResponseMAV(true); - } - - @RequestMapping("/secure/getHasConfiguredComputeServices.do") - public ModelAndView getHasConfiguredComputeServices() throws PortalServiceException { - PortalUser user = userService.getLoggedInUser(); - if (user == null) { - return generateJSONResponseMAV(false); - } - boolean 
hasConfigured = user.configuredServicesStatus(nciDetailsService, cloudComputeServices); - return generateJSONResponseMAV(hasConfigured); - } - } diff --git a/src/main/java/org/auscope/portal/server/web/repositories/NCIDetailsEncRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/NCIDetailsEncRepository.java deleted file mode 100644 index b1dfd1217..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/NCIDetailsEncRepository.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetailsEnc; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface NCIDetailsEncRepository extends JpaRepository { - - NCIDetailsEnc findByUser(PortalUser user); -} diff --git a/src/main/java/org/auscope/portal/server/web/repositories/VEGLJobRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/VEGLJobRepository.java deleted file mode 100644 index 3fd7aa877..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/VEGLJobRepository.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import java.util.List; - -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Query; -import org.springframework.stereotype.Repository; - -@Repository -public interface VEGLJobRepository extends JpaRepository { - - - @Query("SELECT j FROM VEGLJob j WHERE j.seriesId= ?1 AND j.emailAddress = ?2 AND LOWER(j.status) <> 'deleted'") - List findBySeriesIdAndEmail(Integer seriesId, String email); - - @Query("SELECT j FROM VEGLJob j WHERE j.emailAddress = ?1 AND LOWER(j.status) <> 'deleted'") - List findByEmail(String email); - - @Query("SELECT j FROM VEGLJob j WHERE LOWER(j.status)='" + JobBuilderController.STATUS_PENDING + - "' OR LOWER(j.status)='" + JobBuilderController.STATUS_ACTIVE + "'") - List findPendingOrActiveJobs(); - - @Query("SELECT j FROM VEGLJob j WHERE LOWER(j.status)='" + JobBuilderController.STATUS_INQUEUE + "'") - List findInqueueJobs(); - -} diff --git a/src/main/java/org/auscope/portal/server/web/repositories/VEGLSeriesRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/VEGLSeriesRepository.java deleted file mode 100644 index 841bfbe73..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/VEGLSeriesRepository.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import org.auscope.portal.server.vegl.VEGLSeries; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface VEGLSeriesRepository extends JpaRepository { - - -} diff --git a/src/main/java/org/auscope/portal/server/web/repositories/VGLDataPurchaseRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/VGLDataPurchaseRepository.java deleted file mode 100644 index 4a01324ea..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/VGLDataPurchaseRepository.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import java.util.List; - -import org.auscope.portal.server.vegl.VGLDataPurchase; -import 
org.auscope.portal.server.web.security.PortalUser; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface VGLDataPurchaseRepository extends JpaRepository { - - List findByParentOrderByDateDesc(PortalUser user); -} diff --git a/src/main/java/org/auscope/portal/server/web/repositories/VGLJobAuditLogRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/VGLJobAuditLogRepository.java deleted file mode 100644 index bcefb98eb..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/VGLJobAuditLogRepository.java +++ /dev/null @@ -1,13 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import java.util.List; - -import org.auscope.portal.server.vegl.VGLJobAuditLog; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface VGLJobAuditLogRepository extends JpaRepository { - - List findByJobId(Integer id); -} diff --git a/src/main/java/org/auscope/portal/server/web/repositories/VGLJobPurchaseRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/VGLJobPurchaseRepository.java deleted file mode 100644 index 18f58461f..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/VGLJobPurchaseRepository.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import java.util.List; - -import org.auscope.portal.server.vegl.VGLJobPurchase; -import org.auscope.portal.server.web.security.PortalUser; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface VGLJobPurchaseRepository extends JpaRepository { - - List findByParent(PortalUser user); -} diff --git a/src/main/java/org/auscope/portal/server/web/repositories/VLScmSnapshotRepository.java b/src/main/java/org/auscope/portal/server/web/repositories/VLScmSnapshotRepository.java deleted file mode 100644 index b6d268bad..000000000 --- a/src/main/java/org/auscope/portal/server/web/repositories/VLScmSnapshotRepository.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.auscope.portal.server.web.repositories; - -import java.util.List; - -import org.auscope.portal.server.vegl.VLScmSnapshot; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface VLScmSnapshotRepository extends JpaRepository { - - List findByScmEntryId(String scmEntryId); - VLScmSnapshot findByScmEntryIdAndComputeServiceId(String scmEntryId, String computeServiceId); -} diff --git a/src/main/java/org/auscope/portal/server/web/security/NCIDetails.java b/src/main/java/org/auscope/portal/server/web/security/NCIDetails.java deleted file mode 100644 index 953edc3fa..000000000 --- a/src/main/java/org/auscope/portal/server/web/security/NCIDetails.java +++ /dev/null @@ -1,103 +0,0 @@ -package org.auscope.portal.server.web.security; - -import java.io.Serializable; - -import org.auscope.portal.core.cloud.CloudJob; - - -public class NCIDetails implements Serializable { - - private static final long serialVersionUID = -7219385540898450290L; - - public final static String PROPERTY_NCI_USER = "nci_user"; - public final static String PROPERTY_NCI_KEY = "nci_key"; - public final static String PROPERTY_NCI_PROJECT = "nci_project"; - - private Integer id; - private PortalUser user; - private String username; - private String project; - private 
String key; - - public NCIDetails() { - super(); - } - - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - /** - * The associated PortalUser - * @return - */ - public PortalUser getUser() { - return user; - } - - /** - * The associated PortalUser - * @param user - */ - public void setUser(PortalUser user) { - this.user = user; - } - - /** - * The user's NCI username (encrypted) - * @return - */ - public String getUsername() { - return this.username; - } - - /** - * The user's NCI username (encrypted) - * @param nciUsername - */ - public void setUsername(String username) { - this.username = username; - } - - /** - * The default project for the NCI user (encrypted) - * @return - */ - public String getProject() { - return project; - } - - /** - * The default project for the NCI user (encrypted) - * @param project - */ - public void setProject(String project) { - this.project = project; - } - - /** - * The user's NCI key (encrypted) - * @return - */ - public String getKey() { - return this.key; - } - - /** - * The user's NCI key (encrypted) - * @param nciUsername - */ - public void setKey(String key) { - this.key = key; - } - - public void applyToJobProperties(CloudJob job) throws Exception { - job.setProperty(PROPERTY_NCI_USER, getUsername()); - job.setProperty(PROPERTY_NCI_PROJECT, getProject()); - job.setProperty(PROPERTY_NCI_KEY, getKey()); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/security/NCIDetailsEnc.java b/src/main/java/org/auscope/portal/server/web/security/NCIDetailsEnc.java deleted file mode 100644 index 286c517b0..000000000 --- a/src/main/java/org/auscope/portal/server/web/security/NCIDetailsEnc.java +++ /dev/null @@ -1,114 +0,0 @@ -package org.auscope.portal.server.web.security; - -import java.io.Serializable; - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.OneToOne; -import jakarta.persistence.Table; - -@Entity -@Table(name = "nci_details") -public class NCIDetailsEnc implements Serializable { - - private static final long serialVersionUID = -1617534178282032823L; - - public final static String PROPERTY_NCI_USER = "nci_user"; - public final static String PROPERTY_NCI_KEY = "nci_key"; - public final static String PROPERTY_NCI_PROJECT = "nci_project"; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - @OneToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "user") - private PortalUser user; - @Column(name="nci_username") - private byte[] username; - @Column(name="nci_project") - private byte[] project; - @Column(name="nci_key") - private byte[] key; - - public NCIDetailsEnc() { - super(); - } - - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - /** - * The associated PortalUser - * @return - */ - public PortalUser getUser() { - return user; - } - - /** - * The associated PortalUser - * @param user - */ - public void setUser(PortalUser user) { - this.user = user; - } - - /** - * The user's NCI username (encrypted) - * @return - */ - public byte[] getUsername() { - return this.username; - } - - /** - * The user's NCI username (encrypted) - * @param nciUsername - */ - public void setUsername(byte[] username) { - this.username = username; - } - - /** - * The default project for 
the NCI user (encrypted) - * @return - */ - public byte[] getProject() { - return project; - } - - /** - * The default project for the NCI user (encrypted) - * @param project - */ - public void setProject(byte[] project) { - this.project = project; - } - - /** - * The user's NCI key (encrypted) - * @return - */ - public byte[] getKey() { - return this.key; - } - - /** - * The user's NCI key (encrypted) - * @param nciUsername - */ - public void setKey(byte[] key) { - this.key = key; - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/security/PortalOAuth2UserService.java b/src/main/java/org/auscope/portal/server/web/security/PortalOAuth2UserService.java index f40a9b42e..766beda04 100644 --- a/src/main/java/org/auscope/portal/server/web/security/PortalOAuth2UserService.java +++ b/src/main/java/org/auscope/portal/server/web/security/PortalOAuth2UserService.java @@ -4,7 +4,7 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.StringUtils; import org.auscope.portal.server.web.repositories.PortalUserRepository; import org.auscope.portal.server.web.security.PortalUser.AuthenticationFramework; import org.auscope.portal.server.web.service.PortalUserDetailsService; diff --git a/src/main/java/org/auscope/portal/server/web/security/PortalUser.java b/src/main/java/org/auscope/portal/server/web/security/PortalUser.java index ca70ace18..de000a4e3 100644 --- a/src/main/java/org/auscope/portal/server/web/security/PortalUser.java +++ b/src/main/java/org/auscope/portal/server/web/security/PortalUser.java @@ -6,18 +6,12 @@ import jakarta.persistence.CascadeType; import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; import jakarta.persistence.Id; import jakarta.persistence.OneToMany; -import jakarta.persistence.OneToOne; import jakarta.persistence.Table; import jakarta.persistence.Transient; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; import org.auscope.portal.server.bookmark.BookMark; -import org.auscope.portal.server.web.controllers.BaseCloudController; -import org.auscope.portal.server.web.service.NCIDetailsService; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.userdetails.UserDetails; @@ -43,14 +37,7 @@ public enum AuthenticationFramework { GOOGLE, GITHUB, AAF } @OneToMany(mappedBy = "parent", cascade=CascadeType.ALL, orphanRemoval = true) private List authorities; - private String arnExecution; - private String arnStorage; - private String s3Bucket; - private String awsSecret; - private String awsKeyName; private Integer acceptedTermsConditions; - @OneToOne(mappedBy = "user", fetch = FetchType.LAZY, cascade = CascadeType.ALL) - private NCIDetailsEnc nciDetailsEnc; @Transient private AuthenticationFramework authentication; @@ -74,7 +61,6 @@ public PortalUser(String id, String fullName, String email, this.bookMarks = bookMarks; } - /** * Gets the ID as reported by the remote authentication service (Probably google). * AAF doesn't return a unique ID so we use the user's email address in this case. 
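// Illustrative aside, a minimal hypothetical sketch (class name invented): the
// org.apache.commons.lang3.StringUtils semantics relied on by the empty/blank
// checks elsewhere in this diff. isBlank() additionally treats whitespace-only
// input as blank, which isEmpty() does not.
import org.apache.commons.lang3.StringUtils;

class StringUtilsBehaviourSketch {
    public static void main(String[] args) {
        System.out.println(StringUtils.isEmpty(""));     // true
        System.out.println(StringUtils.isEmpty("   "));  // false: whitespace only
        System.out.println(StringUtils.isBlank("   "));  // true
        System.out.println(StringUtils.isBlank(null));   // true
    }
}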
@@ -93,22 +79,6 @@ public void setId(String id) { this.id = id; } - /** - * The name of the AWS S3 bucket where this user's job data will be written - * @return - */ - public String getS3Bucket() { - return s3Bucket; - } - - /** - * The name of the AWS S3 bucket where this user's job data will be written - * @param s3Bucket - */ - public void setS3Bucket(String s3Bucket) { - this.s3Bucket = s3Bucket; - } - /** * Gets a string representing the full name of the user * @@ -143,22 +113,6 @@ public void setEmail(String email) { this.email = email; } - /** - * The keyname to be used for VMs started by this user (can be null) - * @return - */ - public String getAwsKeyName() { - return awsKeyName; - } - - /** - * The keyname to be used for VMs started by this user (can be null) - * @param awsKeyName - */ - public void setAwsKeyName(String awsKeyName) { - this.awsKeyName = awsKeyName; - } - /** * The version of the T&Cs that the user has last accepted (or null if none) * @return @@ -175,22 +129,6 @@ public void setAcceptedTermsConditions(Integer acceptedTermsConditions) { this.acceptedTermsConditions = acceptedTermsConditions; } - /** - * - * @return - */ - public NCIDetailsEnc getNciDetailsEnc() { - return nciDetailsEnc; - } - - /** - * - * @param nciDetails - */ - public void setNciDetailsEnc(NCIDetailsEnc nciDetails) { - this.nciDetailsEnc = nciDetails; - } - @Override public String toString() { String strId = id == null ? "null" : id; @@ -219,30 +157,6 @@ public void setAuthorities(List authorities) { } } - public String getArnExecution() { - return arnExecution; - } - - public void setArnExecution(String arnExecution) { - this.arnExecution = arnExecution; - } - - public String getArnStorage() { - return arnStorage; - } - - public void setArnStorage(String arnStorage) { - this.arnStorage = arnStorage; - } - - public String getAwsSecret() { - return awsSecret; - } - - public void setAwsSecret(String awsSecret) { - this.awsSecret = awsSecret; - } - public AuthenticationFramework getAuthentication() { return this.authentication; } @@ -290,19 +204,6 @@ public boolean acceptedTermsConditionsStatus() { acceptedTermsConditions > 0; } - /** - * Returns true iff this PortalUser instance has at least 1 compute service - * which has been properly configured. - * - * @param nciDetailsService - * @param cloudComputeServices - * @return - * @throws PortalServiceException - */ - public boolean configuredServicesStatus(NCIDetailsService nciDetailsService, CloudComputeService[] cloudComputeServices) throws PortalServiceException { - return !BaseCloudController.getConfiguredComputeServices(this, nciDetailsService, cloudComputeServices).isEmpty(); - } - @Override public String getPassword() { return null; diff --git a/src/main/java/org/auscope/portal/server/web/service/ANVGLFileStagingService.java b/src/main/java/org/auscope/portal/server/web/service/ANVGLFileStagingService.java deleted file mode 100644 index 7462bf446..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/ANVGLFileStagingService.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.File; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.StagingInformation; -import org.auscope.portal.core.services.cloud.FileStagingService; -import org.auscope.portal.server.vegl.VEGLJob; - -/** - * Created by wis056 on 3/10/2014. 
- * - * XXX Check needed - * - */ -public class ANVGLFileStagingService extends FileStagingService { - @SuppressWarnings("unused") - private final Log logger = LogFactory.getLog(getClass()); - - public ANVGLFileStagingService(StagingInformation stagingInformation) { - super(stagingInformation); - } - - public File createLocalFile(String fileName, VEGLJob job) { - String directory = FileStagingService.pathConcat(stagingInformation.getStageInDirectory(), FileStagingService.getBaseFolderForJob(job)); - String destinationPath = pathConcat(directory, fileName); - File file = new File(destinationPath); - return file; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/ANVGLProvenanceService.java b/src/main/java/org/auscope/portal/server/web/service/ANVGLProvenanceService.java deleted file mode 100644 index 114cc6afa..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/ANVGLProvenanceService.java +++ /dev/null @@ -1,361 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.http.NameValuePair; -import org.apache.http.message.BasicNameValuePair; -import org.apache.http.client.utils.URLEncodedUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.http.Header; -import org.apache.http.HttpResponse; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.scm.Solution; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import au.csiro.promsclient.Activity; -import au.csiro.promsclient.Entity; -import au.csiro.promsclient.ExternalReport; -import au.csiro.promsclient.ProvenanceReporter; -import au.csiro.promsclient.Report; -import au.csiro.promsclient.ServiceEntity; - -/** - * Created by Catherine Wise (wis056) on 3/10/2014. Modified by Stuart Woodman - * (woo392) for ANVGL. - * - * A Service for reporting provenance information for storage in a PROMS - * instance and also included in downloads. - */ -public class ANVGLProvenanceService { - /** Logger for this class. */ - private static final Log LOGGER = LogFactory.getLog(ANVGLProvenanceService.class); - /** Default name for the half-baked provenance uploaded to the cloud. */ - private static final String ACTIVITY_FILE_NAME = "activity.ttl"; - /** Protocol for email URIs */ - private static final String MAIL = "mailto:"; - /** Document type for output. */ - private static final String TURTLE_FORMAT = "TTL"; - - private URI PROMSService = null; - private String PROMSReportingSystem = ""; - - /** - * URL of the current webserver. Will need to be set by classes using this - * service. 
- */ - public void setServerURL(String serverURL) { - ANVGLServerURL.INSTANCE.set(serverURL); - } - - public String serverURL() { - return ANVGLServerURL.INSTANCE.get(); - } - - /** The service to allow us to write temporary local files. */ - private ANVGLFileStagingService anvglFileStagingService; - /** The service to allow us to write files to the cloud. */ - private CloudStorageService[] cloudStorageServices; - - /** - * Autowired constructor for Spring -- don't use this directly, you should - * be able to autowire this into your own class. - * - * @param newAnvglFileStagingService - * set the local file store must not be null - * @param newCloudStorageServices - * set the cloud file store must not be null - */ - @Autowired - public ANVGLProvenanceService(final ANVGLFileStagingService anvglFileStagingService, - final CloudStorageService[] cloudStorageServices, - @Value("${cloud.proms.report.url}") String promsUrl, - @Value("${cloud.proms.reportingsystem.uri}") String promsReportingSystemUri) { - this.anvglFileStagingService = anvglFileStagingService; - this.cloudStorageServices = cloudStorageServices; - this.PROMSReportingSystem = promsReportingSystemUri; - try { - this.PROMSService = new URI(promsUrl); - } catch (URISyntaxException e) { - LOGGER.error(e.getMessage()); - } - } - - /** - * Create the half-baked provenance information for this job just before it - * starts. This will create the provenance information on the inputs, job - * and script, but not on the outputs (as they don't exist yet). - * - * @param job - * The Virtual Labs Job we want to report provenance on. It - * should be just about to execute, but not yet have started. - * @return The TURTLE text. - */ - public String createActivity(final VEGLJob job, final Set solutions, PortalUser user) { - String jobURL = jobURL(job, serverURL()); - Activity anvglJob = null; - Set inputs = createEntitiesForInputs(job, solutions, user); - try { - anvglJob = new Activity().setActivityUri(new URI(jobURL)).setTitle(job.getName()) - .setDescription(job.getDescription()).setStartedAtTime(new Date()) - .setWasAssociatedWith(new URI(user.getId())).setUsedEntities(inputs); - } catch (URISyntaxException ex) { - LOGGER.error(String.format("Error parsing server name %s into URI.", jobURL), ex); - } - StringWriter out = new StringWriter(); - Model graph = anvglJob.getGraph(); - if (graph != null) { - uploadModel(graph, job); - anvglJob.getGraph().write(out, TURTLE_FORMAT); - } - return out.toString(); - } - - /** - * Upload a complete or partially complete model to the cloud for storage. - * - * @param model - * The RDF model to serialize and upload to the cloud. - * @param job - * The virtual lab job this model refers to. - */ - protected final void uploadModel(final Model model, final VEGLJob job) { - if (model != null) { - try { - File tmpActivity = anvglFileStagingService.createLocalFile(ACTIVITY_FILE_NAME, job); - FileWriter fileWriter = new FileWriter(tmpActivity); - model.write(fileWriter, TURTLE_FORMAT); - fileWriter.close(); - File[] files = { tmpActivity }; - - CloudStorageService cloudStorageService = getStorageService(job); - cloudStorageService.uploadJobFiles(job, files); - } catch (IOException | PortalServiceException e) { - // JAVA RAGE - LOGGER.error(e.getLocalizedMessage()); - } - } - } - - /** - * Looks through the list of all cloud storage providers and finds one we - * can use for this job. - * - * @param job - * The virtual lab job we want to know the appropriate cloud - * providers for. 
- * @return The first cloud provider selected for this job, or null if none - * has yet been assigned. - */ - protected final CloudStorageService getStorageService(final VEGLJob job) { - for (CloudStorageService s : cloudStorageServices) { - if (s.getId().equals(job.getStorageServiceId())) { - return s; - } - } - return null; - } - - /** - * Constructs a full URL which can be used to get information (JSON) about a - * job. - * - * @param job - * The virtual labs job we want a url for. - * @param serverURL - * URL of the webserver. - * @return The URL for this job. - */ - protected static String jobURL(final VEGLJob job, final String serverURL) { - return String.format("%s/secure/getJobObject.do?jobId=%s", serverURL, job.getId()); - } - - /** - * Get a unique url for this output file. - * - * @param job - * The virtual labs job this output belongs to. - * @param outputInfo - * The metadata for the output file. - * @param serverURL - * URL of the webserver. - * @return A URL for the file. May or may not be public. - */ - protected static String outputURL(final VEGLJob job, final CloudFileInformation outputInfo, final String serverURL) { - List params = new ArrayList(); - params.add(new BasicNameValuePair("jobId", job.getId().toString())); - params.add(new BasicNameValuePair("key", outputInfo.getCloudKey())); - String paramString = URLEncodedUtils.format(params, "UTF-8"); - return String.format("%s/secure/jobFile.do?%s", serverURL, paramString); - } - - /** - * Looks through the input files listed for a job and create appropriate - * PROV-O Entities for them. - * - * @param job - * The virtual labs job we want to examine the inputs of. - * @return An array of PROV-O entities. May be empty, but won't be null. - */ - public Set createEntitiesForInputs(final VEGLJob job, final Set solutions, PortalUser user) { - Set inputs = new HashSet<>(); - // Downloads first - try { - for (VglDownload dataset : job.getJobDownloads()) { - URI dataURI = new URI(dataset.getUrl()); - URI baseURI = new URI(dataURI.getScheme() + "://" + dataURI.getAuthority() + dataURI.getPath()); - inputs.add((ServiceEntity) new ServiceEntity().setQuery(dataURI.getQuery()).setServiceBaseUri(baseURI) - .setDataUri(dataURI).setDescription(dataset.getDescription()) - .setWasAttributedTo(new URI(user.getId())).setTitle(dataset.getName())); - LOGGER.debug("New Input: " + dataset.getUrl()); - } - } catch (URISyntaxException ex) { - LOGGER.error( - String.format("Error parsing data source urls %s into URIs.", job.getJobDownloads().toString()), - ex); - } - // Then extra files - try { - CloudStorageService cloudStorageService = getStorageService(job); - CloudFileInformation[] fileInformationSet; - fileInformationSet = cloudStorageService.listJobFiles(job); - - for (CloudFileInformation information : fileInformationSet) { - URI inputURI = new URI(outputURL(job, information, serverURL())); - LOGGER.trace("New Input: " + inputURI.toString()); - inputs.add(new Entity().setDataUri(inputURI).setWasAttributedTo(new URI(user.getId()))); - } - } catch (PortalServiceException e) { - LOGGER.error(String.format("Unable to retrieve upload file information for job: %s", e)); - } catch (URISyntaxException ex) { - LOGGER.error( - String.format("Error parsing data source urls %s into URIs.", job.getJobDownloads().toString()), - ex); - } - - if (solutions != null) { - for (Solution solution: solutions) { - try { - URI dataURI = new URI(solution.getUri()); - inputs.add(new Entity().setWasAttributedTo(new URI(user.getId())).setEntityUri(dataURI) - 
.setDescription(solution.getDescription()).setCreated(solution.getCreatedAt()) - .setTitle(solution.getName()).setMetadataUri(dataURI)); - } catch (URISyntaxException ex) { - LOGGER.error(String.format("Error parsing data source urls %s into URIs.", solution.getUri()), ex); - } - } - } - return inputs; - } - - public HttpResponse generateAndSaveReport(Activity activity, VEGLJob job) { - HttpResponse response = null; - //String server = ANVGLServerURL.INSTANCE.get(); - try { - Report report = new ExternalReport().setActivity(activity).setTitle(job.getName()) - .setGeneratedAtTime(new Date()).setNativeId(Integer.toString(job.getId())) - .setReportingSystemUri(new URI(PROMSReportingSystem)); - ProvenanceReporter reporter = new ProvenanceReporter(); - response = reporter.postReport(PROMSService, report); - this.uploadModel(report.getGraph(), job); - StringWriter stringWriter = new StringWriter(); - report.getGraph().write(new PrintWriter(stringWriter), "TURTLE"); - String reportString = stringWriter.toString(); - LOGGER.trace(reportString); - LOGGER.trace(response); - } catch (Exception e) { - LOGGER.error(e.getMessage()); - } - return response; - } - - /** - * Takes a completed job and finishes creating the provenance record, and - * uploads it to the cloud. The job *must* have had - * {@link #createActivity(ANVGLJob, Solution, PortalUser) createActivity} - * called with it already. Otherwise it can't collect the relevant - * information, and won't do anything. - * - * @param job - * Completed virtual labs job, about which we will finish our - * provenance gathering. - * @return the URL of the PROMS report if successful, empty String otherwise - */ - public String createEntitiesForOutputs(final VEGLJob job) { - Set outputs = new HashSet<>(); - Set potentialOutputs = new HashSet<>(); - CloudStorageService cloudStorageService = getStorageService(job); - CloudFileInformation[] fileInformationSet; - Activity activity = null; - String reportLink = ""; - try { - fileInformationSet = cloudStorageService.listJobFiles(job); - for (CloudFileInformation information : fileInformationSet) { - List inputs = job.getJobDownloads(); - List names = new ArrayList<>(); - for (VglDownload input : inputs) { - names.add(input.getName()); - } - if (information.getName().equals(ACTIVITY_FILE_NAME)) { - // Here's our Turtle! - InputStream activityStream = cloudStorageService.getJobFile(job, ACTIVITY_FILE_NAME); - Model model = ModelFactory.createDefaultModel(); - LOGGER.debug("Current server URL: " + serverURL()); - model = model.read(activityStream, serverURL(), TURTLE_FORMAT); - activity = new Activity().setActivityUri(new URI(jobURL(job, serverURL()))).setFromModel(model); - } else if (!names.contains(information.getName())) { - // Ah ha! This must be an output or input. 
- URI outputURI = new URI(outputURL(job, information, serverURL())); - LOGGER.debug("New input/output: " + outputURI.toString()); - potentialOutputs - .add(new Entity().setDataUri(outputURI).setWasAttributedTo(new URI(MAIL + job.getUser()))); - } - } - } catch (PortalServiceException | URISyntaxException ex) { - LOGGER.error( - String.format("Error parsing data results urls %s into URIs.", job.getJobDownloads().toString()), - ex); - } - - if (activity != null) { - activity.setEndedAtTime(job.getProcessDate()); - for (Entity potentialOutput : potentialOutputs) { - if (activity.usedEntities != null && !activity.usedEntities.contains(potentialOutput)) { - outputs.add(potentialOutput); - LOGGER.trace("Added input from potentials list: " + potentialOutput); - } - } - activity.setGeneratedEntities(outputs); - HttpResponse response = generateAndSaveReport(activity, job); - StringWriter out = new StringWriter(); - activity.getGraph().write(out, TURTLE_FORMAT, serverURL()); - if(response != null) { - Header[] headers = response.getHeaders("Link"); - if(headers.length > 0) { - reportLink = headers[0].getValue(); - reportLink = reportLink.substring(reportLink.indexOf('<') + 1, reportLink.indexOf('>')); - } - } - } - return reportLink; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/ANVGLServerURL.java b/src/main/java/org/auscope/portal/server/web/service/ANVGLServerURL.java deleted file mode 100644 index 8330862b4..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/ANVGLServerURL.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.net.URI; -import java.net.URISyntaxException; - -/** - * Created by wis056 on 18/11/2014. - */ -public enum ANVGLServerURL { - INSTANCE; - private String serverURL = null; - - public String get() { - return serverURL; - } - - public void set(String serverURL) { - try { - URI newURL = new URI(serverURL); - this.serverURL = newURL.getScheme() + "://" + newURL.getAuthority(); - } catch (URISyntaxException e) { - this.serverURL = serverURL; - } - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/CloudSubmissionService.java b/src/main/java/org/auscope/portal/server/web/service/CloudSubmissionService.java deleted file mode 100644 index f273dc55b..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/CloudSubmissionService.java +++ /dev/null @@ -1,238 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.util.Date; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.RejectedExecutionException; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import javax.annotation.PostConstruct; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.service.monitor.VGLJobStatusChangeHandler; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -/** - * A service class for handling the provisioning of VM's using a CloudComputeService 
asynchronously. - * @author Josh Vote (CSIRO) - * - */ -@Service -public class CloudSubmissionService { - - public static final int THREAD_POOL_SIZE = 5; - public static final long QUOTA_RESUBMIT_MINUTES = 30; - - private final Log logger = LogFactory.getLog(getClass()); - @Autowired - private VEGLJobManager jobManager; - @Autowired - private VGLJobStatusChangeHandler vglJobStatusChangeHandler; - private ScheduledExecutorService executor; - private ConcurrentHashMap> submittingJobs; - private long quotaResubmitTime = QUOTA_RESUBMIT_MINUTES; - private TimeUnit quotaResubmitUnits = TimeUnit.MINUTES; - - public CloudSubmissionService() { - this(Executors.newScheduledThreadPool(THREAD_POOL_SIZE)); - } - - public CloudSubmissionService(ScheduledExecutorService executor) { - super(); - this.submittingJobs = new ConcurrentHashMap>(); - this.executor = executor; - } - - @PostConstruct - public void init() { - this.vglJobStatusChangeHandler.getJobStatusLogReader().setCloudSubmissionService(this); - } - - public VEGLJobManager getJobManager() { - return jobManager; - } - - public void setJobManager(VEGLJobManager jobManager) { - this.jobManager = jobManager; - } - - public VGLJobStatusChangeHandler getVglJobStatusChangeHandler() { - return vglJobStatusChangeHandler; - } - - public void setVglJobStatusChangeHandler(VGLJobStatusChangeHandler vglJobStatusChangeHandler) { - this.vglJobStatusChangeHandler = vglJobStatusChangeHandler; - } - - private String generateKey(VEGLJob job, CloudComputeService cloudComputeService) { - return String.format("%1$s-%2$s", job.getId(), cloudComputeService.getId()); - } - - public void setQuotaResubmitTime(long quotaResubmitTime) { - this.quotaResubmitTime = quotaResubmitTime; - } - - public void setQuotaResubmitUnits(TimeUnit quotaResubmitUnits) { - this.quotaResubmitUnits = quotaResubmitUnits; - } - - /** - * Using the internal executor, submits a Runnable for submitting this job using the specified cloudComputeService. - * - * If the submission fails due to quota errors then the runnable will be rescheduled to run in QUOTA_RESUBMIT_MINUTES minutes - * @param cloudComputeService The cloud compute service for recieving the job submission - * @param job The job to be submitted - * @param userDataString The user data string to be sent to the cloud - * @throws PortalServiceException - */ - public void queueSubmission(CloudComputeService cloudComputeService, VEGLJob job, String userDataString) throws PortalServiceException { - SubmissionRunnable runnable = new SubmissionRunnable(cloudComputeService, job, userDataString, jobManager, vglJobStatusChangeHandler, submittingJobs, executor, quotaResubmitTime, quotaResubmitUnits); - try { - //Make sure we synchronize so that updates to the job/cache can't start until - //this future is properly put in the cache - synchronized(submittingJobs) { - Future future = executor.submit(runnable); - submittingJobs.put(generateKey(job, cloudComputeService), future); - } - } catch (RejectedExecutionException ex) { - logger.warn("Unable to start thread for submitting job: " + ex.getMessage()); - logger.debug("Exception:", ex); - throw new PortalServiceException("Unable to start thread for submitting job", ex); - } - } - - /** - * Dequeues the specified job from the specified compute service submission queue. If the job is currently - * submitting, this will have no effect. 
- * @param job - * @param cloudComputeService - */ - public void dequeueSubmission(VEGLJob job, CloudComputeService cloudComputeService) { - Future future = submittingJobs.get(generateKey(job, cloudComputeService)); - if (future != null) { - submittingJobs.remove(generateKey(job, cloudComputeService)); - future.cancel(false); - } - } - - /** - * Returns true if the specified job is submitting to the specified cloudComputeService. - * - * Any updates to the internal cache will be synchronized against job status updates so if this - * method returns false then you can be certain that the underlying job has been saved to the DB - * - * @param job - * @param cloudComputeService - * @return - */ - public boolean isSubmitting(VEGLJob job, CloudComputeService cloudComputeService) { - synchronized(submittingJobs) { - return submittingJobs.containsKey(generateKey(job, cloudComputeService)); - } - } - - private class SubmissionRunnable implements Runnable { - private CloudComputeService cloudComputeService; - private VEGLJob curJob; - private String userDataString; - private VEGLJobManager jobManager; - private VGLJobStatusChangeHandler vglJobStatusChangeHandler; - private ConcurrentHashMap> submittingJobs; - private ScheduledExecutorService executor; - private long quotaResubmitTime = QUOTA_RESUBMIT_MINUTES; - private TimeUnit quotaResubmitUnits = TimeUnit.MINUTES; - - public SubmissionRunnable(CloudComputeService cloudComputeService, VEGLJob curJob, String userDataString, VEGLJobManager jobManager, VGLJobStatusChangeHandler vglJobStatusChangeHandler, - ConcurrentHashMap> submittingJobs, ScheduledExecutorService executor, long quotaResubmitTime, TimeUnit quotaResubmitUnits) { - this.cloudComputeService = cloudComputeService; - this.curJob = curJob; - this.userDataString = userDataString; - this.jobManager = jobManager; - this.vglJobStatusChangeHandler = vglJobStatusChangeHandler; - this.submittingJobs = submittingJobs; - this.executor = executor; - this.quotaResubmitTime = quotaResubmitTime; - this.quotaResubmitUnits = quotaResubmitUnits; - } - - @Override - public void run() { - String instanceId = null; - boolean successfulSubmit = false; - boolean reschedule = false; - Throwable caughtException = null; - - try { - instanceId = cloudComputeService.executeJob(curJob, userDataString); - if (StringUtils.isEmpty(instanceId)) { - throw new PortalServiceException(String.format("Null/Empty instance ID returned for submission to %1$s for job %2$s",cloudComputeService.getId(), curJob.getId())); - } - logger.debug("Launched instance: " + instanceId); - - successfulSubmit = true; - } catch(Throwable e) { - caughtException = e; - successfulSubmit = false; - if (e instanceof PortalServiceException && - ((PortalServiceException) e).getErrorCorrection() != null && - ((PortalServiceException) e).getErrorCorrection().contains("Quota exceeded")) { - reschedule = true; - } - - logger.error("Exception when submitting job " + curJob.getId() + ": " + e.getMessage()); - logger.debug("Exception:", e); - } - - //Update job status / fire listeners. - String oldJobStatus = curJob.getStatus(); - synchronized(submittingJobs) { - if (successfulSubmit) { - //Everything went OK - curJob.setComputeInstanceId(instanceId); - curJob.setStatus(JobBuilderController.STATUS_PENDING); - jobManager.createJobAuditTrail(oldJobStatus, curJob, "Set job to Pending. 
Instance ID:" + instanceId); - curJob.setSubmitDate(new Date()); - jobManager.saveJob(curJob); - vglJobStatusChangeHandler.handleStatusChange(curJob,curJob.getStatus(),oldJobStatus); - submittingJobs.remove(generateKey(curJob, cloudComputeService)); - } else if (reschedule) { - //Can't get resources now - reschedule for future run - try { - Future newFuture = executor.schedule(this, this.quotaResubmitTime, this.quotaResubmitUnits); //reschedule this to run again in 30 minutes - submittingJobs.put(generateKey(curJob, cloudComputeService), newFuture); - curJob.setStatus(JobBuilderController.STATUS_INQUEUE); - jobManager.saveJob(curJob); - jobManager.createJobAuditTrail(oldJobStatus, curJob, "Job Placed in Queue"); - vglJobStatusChangeHandler.handleStatusChange(curJob,curJob.getStatus(),oldJobStatus); - } catch (RejectedExecutionException ex) { - //This is bad - can't submit more jobs for queue - forced to kill job submission - logger.error("Cannot reschedule job submission:" + ex.getMessage()); - logger.debug("Exception:", ex); - curJob.setStatus(JobBuilderController.STATUS_ERROR); - submittingJobs.remove(generateKey(curJob, cloudComputeService)); - jobManager.saveJob(curJob); - jobManager.createJobAuditTrail(oldJobStatus, curJob, "Unable to queue job for resubmission: " + ex.getMessage()); - vglJobStatusChangeHandler.handleStatusChange(curJob,curJob.getStatus(),oldJobStatus); - } - } else { - //Error state - curJob.setStatus(JobBuilderController.STATUS_ERROR); - jobManager.saveJob(curJob); - jobManager.createJobAuditTrail(oldJobStatus, curJob, caughtException); - vglJobStatusChangeHandler.handleStatusChange(curJob,curJob.getStatus(),oldJobStatus); - submittingJobs.remove(generateKey(curJob, cloudComputeService)); - } - } - } - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/NCIDetailsService.java b/src/main/java/org/auscope/portal/server/web/service/NCIDetailsService.java deleted file mode 100644 index 5db3a90d3..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/NCIDetailsService.java +++ /dev/null @@ -1,86 +0,0 @@ -package org.auscope.portal.server.web.service; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.server.web.repositories.NCIDetailsEncRepository; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.security.NCIDetailsEnc; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Lazy; -import org.springframework.stereotype.Service; - -@Service -public class NCIDetailsService { - - @Autowired - NCIDetailsEncRepository nciEncRepository; - - @Autowired - PortalUserService userService; - - @Lazy - @Autowired - private VGLCryptoService encryptionService; - - public NCIDetails getByUser(PortalUser user) throws PortalServiceException { - NCIDetailsEnc encRes = nciEncRepository.findByUser(user); - NCIDetails res = new NCIDetails(); - if(encRes != null) { - res.setId(encRes.getId()); - if(encRes.getKey() != null) { - res.setKey(encryptionService.decrypt(encRes.getKey())); - } - if(encRes.getProject() != null) { - res.setProject(encryptionService.decrypt(encRes.getProject())); - } - res.setUser(encRes.getUser()); - if(encRes.getUsername() != null) { - res.setUsername(encryptionService.decrypt(encRes.getUsername())); - } - } - return res; - } - - public void saveNCIDetails(NCIDetails details) throws PortalServiceException { - NCIDetailsEnc detailsEnc = 
new NCIDetailsEnc(); - detailsEnc.setId(details.getId()); - detailsEnc.setKey(encryptionService.encrypt(details.getKey())); - detailsEnc.setProject(encryptionService.encrypt(details.getProject())); - //detailsEnc.setUser(details.getUser()); - detailsEnc.setUser(userService.getLoggedInUser()); - detailsEnc.setUsername(encryptionService.encrypt(details.getUsername())); - nciEncRepository.save(detailsEnc); - } - - /* - private VGLCryptoService encryptionService; - - public NCIDetailsDao(VGLCryptoService encryptionService) { - this.encryptionService=encryptionService; - } - - public NCIDetails getByUser(PortalUser user) throws PortalServiceException { - List resList = getHibernateTemplate().findByNamedParam("from NCIDetailsEnc d where d.user =:p", "p", user); - if(resList.isEmpty()) return null; - NCIDetailsEnc encRes = (NCIDetailsEnc) resList.get(0); - NCIDetails res = new NCIDetails(); - res.setId(encRes.getId()); - res.setKey(encryptionService.decrypt(encRes.getKey())); - res.setProject(encryptionService.decrypt(encRes.getProject())); - res.setUser(encRes.getUser()); - res.setUsername(encryptionService.decrypt(encRes.getUsername())); - return res; - } - - public void save(NCIDetails details) throws PortalServiceException { - NCIDetailsEnc detailsEnc = new NCIDetailsEnc(); - detailsEnc.setId(details.getId()); - detailsEnc.setKey(encryptionService.encrypt(details.getKey())); - detailsEnc.setProject(encryptionService.encrypt(details.getProject())); - detailsEnc.setUser(details.getUser()); - detailsEnc.setUsername(encryptionService.encrypt(details.getUsername())); - - getHibernateTemplate().saveOrUpdate(detailsEnc); - } - */ -} diff --git a/src/main/java/org/auscope/portal/server/web/service/PortalUserDetailsService.java b/src/main/java/org/auscope/portal/server/web/service/PortalUserDetailsService.java index 1b8c85f6e..9d9b22421 100644 --- a/src/main/java/org/auscope/portal/server/web/service/PortalUserDetailsService.java +++ b/src/main/java/org/auscope/portal/server/web/service/PortalUserDetailsService.java @@ -7,7 +7,7 @@ import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.lang3.RandomStringUtils; +import org.apache.commons.lang3.StringUtils; import org.auscope.portal.server.web.security.PortalAuthority; import org.auscope.portal.server.web.security.PortalUser; import org.auscope.portal.server.web.security.PortalUser.AuthenticationFramework; @@ -15,7 +15,6 @@ import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.core.userdetails.UsernameNotFoundException; -import org.springframework.util.StringUtils; /** * Service for creating new users @@ -25,9 +24,6 @@ */ public class PortalUserDetailsService implements UserDetailsService { - public static final int SECRET_LENGTH = 32; - private static char[] BUCKET_NAME_WHITELIST = "abcdefghijklmnopqrstuvwxyz0123456789".toCharArray(); - @Autowired private PortalUserService userService; @@ -67,13 +63,13 @@ public PortalUserDetailsService(String defaultRole, Map> ro @Override public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException { - if(StringUtils.isEmpty(username)) + if(StringUtils.isBlank(username)) throw new UsernameNotFoundException("User name is empty"); return this.userService.getById(username); } public UserDetails loadUserByEmail(String email) throws UsernameNotFoundException { - if(StringUtils.isEmpty(email)) + if(StringUtils.isBlank(email)) throw new 
UsernameNotFoundException("Email is empty"); return this.userService.getByEmail(email); } @@ -93,13 +89,6 @@ public PortalUser createNewUser(String id, newUser.setFullName(userDetails.get("name")); newUser.setAuthentication(authFramework); userService.saveUser(newUser); - // AWS secret and bucketname - synchronized (this.random) { - String randomSecret = RandomStringUtils.random(SECRET_LENGTH, 0, 0, true, true, null, this.random); - newUser.setAwsSecret(randomSecret); - String bucketName = generateRandomBucketName(); - newUser.setS3Bucket(bucketName); - } // Authorities List authorities = new ArrayList<>(); PortalAuthority defaultAuth = new PortalAuthority(defaultRole); @@ -120,12 +109,4 @@ public PortalUser createNewUser(String id, return newUser; } - /** - * Generate a random bucket name for the user - * - * @return a random string preceeded with "vgl-" - */ - public String generateRandomBucketName() { - return "vgl-" + RandomStringUtils.random(32, 0, 0, false, false, BUCKET_NAME_WHITELIST, this.random); - } } diff --git a/src/main/java/org/auscope/portal/server/web/service/ScmEntryService.java b/src/main/java/org/auscope/portal/server/web/service/ScmEntryService.java deleted file mode 100644 index f227b0a01..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/ScmEntryService.java +++ /dev/null @@ -1,706 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.velocity.VelocityContext; -import org.apache.velocity.app.VelocityEngine; -import org.auscope.portal.core.cloud.MachineImage; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.csw.SearchFacet; -import org.auscope.portal.core.services.csw.SearchFacet.Comparison; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VLScmSnapshot; -import org.auscope.portal.server.web.repositories.VLScmSnapshotRepository; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.scm.Dependency; -import org.auscope.portal.server.web.service.scm.Entries; -import org.auscope.portal.server.web.service.scm.Entry; -import org.auscope.portal.server.web.service.scm.Problem; -import org.auscope.portal.server.web.service.scm.ScmLoader; -import org.auscope.portal.server.web.service.scm.ScmLoaderFactory; -import org.auscope.portal.server.web.service.scm.Solution; -import org.auscope.portal.server.web.service.scm.SsscImage; -import org.auscope.portal.server.web.service.scm.Toolbox; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.converter.HttpMessageConverter; -import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; -import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import org.springframework.stereotype.Service; -import org.springframework.web.client.RestClientException; -import org.springframework.web.client.RestTemplate; - -import 
com.fasterxml.jackson.databind.MapperFeature; - - -/** - * A service for handling Scientific Code Marketplace templates. - * - * @author Geoff Squire - * - */ -@Service -public class ScmEntryService implements ScmLoader { - private final Log logger = LogFactory.getLog(getClass()); - - /** Puppet module template resource */ - protected static final String PUPPET_TEMPLATE = - "org/auscope/portal/server/web/service/template.pp"; - - private VLScmSnapshotRepository vlScmSnapshotRepository; - private VelocityEngine velocityEngine; - private VEGLJobManager jobManager; - private CloudComputeService[] cloudComputeServices; - - private List> converters; - - static String solutionsUrl; - - /** - * Create a new instance. - */ - @Autowired - public ScmEntryService(VLScmSnapshotRepository vlScmSnapshotRepository, - VEGLJobManager jobManager, - VelocityEngine velocityEngine, - CloudComputeService[] cloudComputeServices) { - super(); - this.vlScmSnapshotRepository = vlScmSnapshotRepository; - this.jobManager = jobManager; - this.setVelocityEngine(velocityEngine); - this.cloudComputeServices = cloudComputeServices; - - // Configure Jackson converters for use with RestTemplate - this.converters = new ArrayList>(); - this.converters.add( - new MappingJackson2HttpMessageConverter( - new Jackson2ObjectMapperBuilder() - .featuresToEnable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS) - .build() - ) - ); - - // Register this bean as the ScmLoader instance to use - ScmLoaderFactory.registerLoader(this); - } - - /** - * Return id of the VM for entry at computeServiceId, or null if not found. - * - * @param entryId SCM template entry ID - * @param computeServiceId ID of the CloudComputeService provider - * @return Snapshot id if one exists, otherwise null - */ - public String getScmEntrySnapshotId(String entryId, - String computeServiceId) { - String vmId = null; - VLScmSnapshot snapshot = vlScmSnapshotRepository - .findByScmEntryIdAndComputeServiceId(entryId, computeServiceId); - if (snapshot != null) { - vmId = snapshot.getComputeVmId(); - } - return vmId; - } - - /** - * Update job (jobId) with vmId and computeServiceId for solution - * if we have one. - * - * @param jobId String job ID - * @param solutionId String solution URL - * @param user Authenticated PortalUser - * @throws PortalServiceException - */ - public void updateJobForSolution(VEGLJob job, Set solutions, PortalUser user) - throws PortalServiceException { - // Store the solutionId in the job - job.setJobSolutions(solutions); - - // Save the job - try { - jobManager.saveJob(job); - } catch (Exception ex) { - logger.error("Error updating job " + job, ex); - throw new PortalServiceException("Error updating job for solution: ", ex); - } - } - - /** - * Return the puppet module for SCM solution. - * - * Generates a puppet module that will provision a VM suitable for - * running a job using the SCM entry. - * - * Placeholder parameters: - * - * - * - * - * - * - * - * - * - * - * - *
sc-name: Name of the scientific code
source: Map of source parameters
source.type: Source repository type ("git", "svn")
source.url: Source repository URL
source.checkout: Checkout target for source repository
source.exec: Shell command to execute after source checkout.
system_packages: List of system packages
python_packages: List of python packages
python_requirements: Path to a pip requirements.txt file in the source
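// Illustrative aside, a minimal hypothetical sketch (names and values invented):
// merging placeholder parameters like those listed above into a Velocity
// template via VelocityContext/mergeTemplate, mirroring the approach this
// service uses to build the puppet module.
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

class PuppetTemplateMergeSketch {
    static String render(VelocityEngine velocityEngine, String templatePath) {
        Map<String, Object> vars = new HashMap<>();
        vars.put("sc_name", "ExampleCode");          // hypothetical value
        List<String> pythonPackages = new ArrayList<>();
        pythonPackages.add("numpy");                 // hypothetical value
        vars.put("python_packages", pythonPackages);

        VelocityContext context = new VelocityContext(vars);
        StringWriter out = new StringWriter();
        velocityEngine.mergeTemplate(templatePath, "UTF-8", context, out);
        return out.toString();
    }
}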
- * - * @param solutionUrl String URL of the SCM solution - * @return String contents of the puppet module - */ - public String createPuppetModule(String solutionUrl) throws PortalServiceException { - // Fetch the solution entry from the SCM - Solution solution = getScmSolution(solutionUrl); - - // Create a velocity template vars map from the entry - Map vars = puppetTemplateVars(solution); - VelocityContext velocityContext = new VelocityContext(vars); - StringWriter stringWriter = new StringWriter(); - velocityEngine.mergeTemplate(PUPPET_TEMPLATE, "UTF-8", velocityContext, stringWriter); - return stringWriter.toString(); - } - - /** - * Retrieve and decode an SCM entry. - * - * @param entryUrl String URL of the catalogue entry - * @return Map deserialization of the json response - * - */ - public Solution getScmSolution(String entryUrl) { - Solution solution = null; - RestTemplate rest = this.restTemplate(); - - try { - solution = rest.getForObject(entryUrl, Solution.class); - } - catch (RestClientException ex) { - logger.error("Failed to get SSC solution (" + entryUrl + ")", ex); - } - - return solution; - } - - /** - * Retieve and return listing of all solutions available. - * - */ - public SolutionResponse getSolutions() throws PortalServiceException { - return getSolutions((List>) null); - } - - /** - * Return the Solutions for a specific Problem. - * - * @param problem Problem to find Solutions for, or all Solutions if null. - * @return List list of Solutions if any. - * - */ - public SolutionResponse getSolutions(Problem problem) throws PortalServiceException { - return getSolutions(Arrays.asList(new SearchFacet(problem, "problem", Comparison.Equal))); - } - - /** - * Return the Solutions filtered by the specified search facets. - * - * @param problem Problem to find Solutions for, or all Solutions if null. - * @param providers The set of cloud compute services to consider, if null it will use the default provider set - * @return List list of Solutions if any. - * - */ - public SolutionResponse getSolutions(List> facets) - throws PortalServiceException { - return getSolutions(facets, null); - } - - /** - * Return the Solutions filtered by the specified search facets. - * - * @param problem Problem to find Solutions for, or all Solutions if null. - * @param providers The set of cloud compute services to consider, if null it will use the default provider set - * @return List list of Solutions if any. 
- * - */ - public SolutionResponse getSolutions(List> facets, CloudComputeService[] providers) - throws PortalServiceException { - StringBuilder url = new StringBuilder(); - RestTemplate rest = this.restTemplate(); - Entries solutions; - - url.append(solutionsUrl).append("/solutions"); - - //Apply our search facets to the query - String problemIdFilter = null; - String providerFilter = null; - if (facets != null) { - for (SearchFacet facet : facets) { - if (facet.getValue() instanceof Problem) { - problemIdFilter = ((Problem) facet.getValue()).getId(); - } else if (facet.getValue() instanceof String) { - if (facet.getField().equals("text")) { - logger.error("Any Text filtering currently unsupported"); - } else if (facet.getField().equals("provider")) { - providerFilter = (String) facet.getValue(); - } - } - } - } - if (problemIdFilter != null) { - url.append("?problem={problem_id}"); - solutions = rest.getForObject(url.toString(), - Entries.class, - problemIdFilter); - } - else { - solutions = rest.getForObject(url.toString(), Entries.class); - } - - return usefulSolutions(solutions.getSolutions(), providerFilter, providers); - } - - /** - * Return list of Solutions in solutions that are usable in this portal. - * - * Finds Solutions in solutions that can be used in this - * portal. Either they refer to at least one image we can use, or - * supply the information we need to create an image at runtime. - * - * Where a Solution has image(s) already, filter the set to those - * we can use. Currently this means at a cloud provider we can - * use, and we assume the image already has the portal - * infrastructure in place. - * - * @param solutions List solutions from the SSC - * @param providerFilter If non null, the available provider list will be limited to providers with this ID. - * @return List subset of solutions that are usable - * - */ - private SolutionResponse usefulSolutions(List solutions, - String providerFilter, - CloudComputeService[] configuredComputeServices) - throws PortalServiceException { - - SolutionResponse useful = new SolutionResponse(); - - Set allProviders = new HashSet(); - Set configuredProviders = new HashSet(); - - Arrays.stream(configuredComputeServices).forEach(ccs -> configuredProviders.add(ccs.getId())); - Arrays.stream(cloudComputeServices).forEach(ccs -> allProviders.add(ccs.getId())); - - for (Solution solution: solutions) { - // Solution with toolbox with at least one image at a - // provider we can use is useful. - boolean foundConfigured = false; - boolean foundUnconfigured = false; - for (Dependency dep: solution.getDependencies()) { - if (dep.type == Dependency.Type.TOOLBOX) { - Toolbox toolbox = restTemplate().getForObject(dep.identifier, Toolbox.class); - for (SsscImage image: toolbox.getImages()) { - String provider = image.getProvider(); - - if (StringUtils.isNotEmpty(providerFilter) && !providerFilter.equals(provider)) { - continue; - } - - if (configuredProviders.contains(provider)) { - foundConfigured = true; - break; - } else if (allProviders.contains(provider)) { - foundUnconfigured = true; - } - } - } - } - - if (foundConfigured) { - useful.getConfiguredSolutions().add(solution); - } else if (foundUnconfigured) { - useful.getUnconfiguredSolutions().add(solution); - } else { - useful.getOtherSolutions().add(solution); - } - } - - return useful; - } - - /** - * Return the Solution object(s) for job (if known). 
- * - * @param job VEGLJob object - * @returns Solution object if job has a solutionId - */ - public Set getJobSolutions(VEGLJob job) { - Solution solution = null; - HashSet solutions = new HashSet<>(); - - if (job != null) { - for (String uri: job.getJobSolutions()) { - solution = getScmSolution(uri); - if (solution != null) { - solutions.add(solution); - } - } - } - - return solutions; - } - - /** - * Return a Set of the Toolbox object(s) for job. - * - * @param job VEGLJob object - * @returns Set of Solution Objects. - */ - public Set getJobToolboxes(VEGLJob job) throws PortalServiceException { - HashSet toolboxes = new HashSet<>(); - - for (Solution solution: getJobSolutions(job)) { - toolboxes.addAll(entryToolboxes(solution)); - } - - return toolboxes; - } - - /** - * Return image info for toolbox at the specified cloud provider, or null. - * - * Uses the toolbox name and description as metadata for the machine image. - * - * TODO: Extract image metadata from compute provider, as well as toolbox - * info. - * - * @param toolbox Toolbox of interest - * @param provider ID of cloud Provider - * @returns MachineImage with id and metadata of cloud Image - */ - public MachineImage getToolboxImage(Toolbox toolbox, String provider) { - if (toolbox != null && provider != null) { - // Toolbox model allows multiple images for a given provider, but we - // assume only one in practice, so take the first one that matches - // the requested provider. - for (SsscImage img: toolbox.getImages()) { - if (provider.equals(img.getProvider())) { - // Allow the image to override the run command, fall back to - // the toolbox supplied command (if any). Test for isEmpty - // rather than isBlank since we want to allow an image to - // override a non-blank toolbox command with an empty - // string. - String runCommand = img.getCommand(); - if (StringUtils.isEmpty(runCommand)) { - runCommand = toolbox.getCommand(); - } - - MachineImage image = new MachineImage(img.getImageId()); - image.setName(toolbox.getName()); - image.setDescription(toolbox.getDescription()); - image.setRunCommand(runCommand); - image.setAnnotations(img.getAnnotations()); - return image; - } - } - } - - return null; - } - - /** - * Return a map of computeServiceId to imageIds valid for job. - * - * @return Map> with images for job, or null. - * @throws PortalServiceException - */ - public Map> getJobImages(Integer jobId, PortalUser user) throws PortalServiceException { - if (jobId == null) { - return null; - } - - VEGLJob job = jobManager.getJobById(jobId, user); - - return getJobImages(job, user); - } - - public Map> getJobImages(VEGLJob job, PortalUser user) throws PortalServiceException { - if (job == null) { - return null; - } - - return getJobImages(getJobSolutions(job), user); - } - - public Map> getJobImages(Collection solutionIds, PortalUser user) throws PortalServiceException { - if (solutionIds == null) { - return null; - } - - Set solutions = solutionIds.stream().map((String id) -> getScmSolution(id)).collect(Collectors.toSet()); - - return getJobImages(solutions, user); - } - - /** - * Return a map from compute service ids to the set of images they can - * provide for the solutions specified for the job. 
- * - * @param solutions Set solutions for the job in question - * @param user PortalUser currently logged in user - * @return Map> mapping from compute service id to set of image(s) they can provide - * @throws PortalServiceException - */ - public Map> getJobImages(Set solutions, - PortalUser user) - throws PortalServiceException { - Map> images = new HashMap<>(); - - for (Solution solution: solutions) { - for (Toolbox toolbox: entryToolboxes(solution)) { - for (SsscImage img: toolbox.getImages()) { - String providerId = img.getProvider(); - Set vms = images.get(providerId); - if (vms == null) { - vms = new HashSet<>(); - images.put(providerId, vms); - } - MachineImage mi = new MachineImage(img.getImageId()); - mi.setName(toolbox.getName()); - mi.setDescription(toolbox.getDescription()); - mi.setRunCommand(img.getCommand()); - mi.setAnnotations(img.getAnnotations()); - vms.add(mi); - } - } - } - - return images; - } - - /** - * Return a Set of compute service ids with images for job with jobId. - * - * @return Set of compute service ids for job, or null if jobId == null. - * @throws PortalServiceException - */ - public Set getJobProviders(Integer jobId, PortalUser user) throws PortalServiceException { - Map> images = getJobImages(jobId, user); - return (images != null) ? images.keySet() : null; - } - - /** - * Return a set of compute service ids that can provide a toolbox suitable - * for running a job comprising the specified solutions. - * - * @param solutionIds Collection of ids for the job's solutions - * @param user PortalUser with the current logged in user - * @return Set of compute service id strings - * @throws PortalServiceException - */ - public Set getJobProviders(Collection solutionIds) - throws PortalServiceException { - - Set solutions = solutionIds.stream().map((String id) -> getScmSolution(id)).collect(Collectors.toSet()); - Set> providersForSolutions = new HashSet>(); - Set result = null; - - for (Solution solution: solutions) { - Set providers = new HashSet(); - if(result == null) { - result = providers; - } else { - providersForSolutions.add(providers); - } - - for (Toolbox toolbox: entryToolboxes(solution)) { - for (SsscImage img: toolbox.getImages()) { - String providerId = img.getProvider(); - providers.add(providerId); - } - } - } - - if(result==null) { - return null; - } - - for (Set providers : providersForSolutions) { - result.retainAll(providers); - } - - return result; - } - - /** - * Return a set of compute servivce ids that can provide a toolbox - * suitable for running a job comprising the specified solutions. - * - * @param solutions Set of solutions for the job in question - * @param user PortalUser currently logged in user - * @return Set of compute service ids - * @throws PortalServiceException - */ - public Set getJobProviders(Set solutions, PortalUser user) throws PortalServiceException { - Map> images = getJobImages(solutions, user); - return (images != null) ? images.keySet() : null; - } - - /** - * Return a list of the Toolbox dependencies for entry. 
- * - * @param entry Entry to check dependencies - * @return List Toolbox dependencies for Entry - */ - public List entryToolboxes(Entry entry) throws PortalServiceException { - List toolboxes = new ArrayList(); - - for (Dependency dep: entry.getDependencies()) { - if (dep.type == Dependency.Type.TOOLBOX) { - toolboxes.add(restTemplate().getForObject(dep.identifier, Toolbox.class)); - } - } - - return toolboxes; - } - - private Map puppetTemplateVars(Solution solution) - throws PortalServiceException { - Map vars = new HashMap<>(); - // Make sure we have full Toolbox details. - List toolboxes = entryToolboxes(solution); - if (toolboxes.size() > 0) { - Toolbox toolbox = toolboxes.get(0); - - vars.put("sc_name", safeScName(toolbox)); - vars.put("source", toolbox.getSource()); - - ArrayList puppetModules = new ArrayList<>(); - ArrayList pythonPackages = new ArrayList<>(); - ArrayList requirements = new ArrayList<>(); - // Merge dependencies from solution and toolbox - dependencies(toolbox.getDependencies(), - puppetModules, - pythonPackages, - requirements); - dependencies(solution.getDependencies(), - puppetModules, - pythonPackages, - requirements); - vars.put("puppet_modules", puppetModules); - vars.put("python_packages", pythonPackages); - vars.put("python_requirements", requirements); - } - return vars; - } - - private void dependencies(List deps, - List puppetModules, - List pythonPackages, - List requirements) { - for (Dependency dep: deps) { - switch (dep.type) { - case PUPPET: - puppetModules.add(dep.identifier); - break; - case REQUIREMENTS: - requirements.add(dep.identifier); - break; - case PYTHON: - pythonPackages.add(dep.identifier); - break; - default: - logger.warn("Unknown dependency type (" + dep + ")"); - } - } - } - - /** - * Return a safe name for the scientific code used by toolbox. - * - * The name will be used to generate puppet classes and the path - * where the code will be installed on the VM. - * - * Simple solution: strip out all non-word characters as defined - * by the java regex spec. - * - */ - private static String safeScName(Toolbox toolbox) { - return toolbox.getName().replaceAll("\\W", ""); - } - - /** - * @return the vlScmSnapshotDao - */ - /* - public VLScmSnapshotDao getVlScmSnapshotDao() { - return vlScmSnapshotDao; - } - */ - - /** - * @param vlScmSnapshotDao the vlScmSnapshotDao to set - */ - /* - public void setVlScmSnapshotDao(VLScmSnapshotDao vlScmSnapshotDao) { - this.vlScmSnapshotDao = vlScmSnapshotDao; - } - */ - - /** - * @return the velocityEngine - */ - public VelocityEngine getVelocityEngine() { - return velocityEngine; - } - - /** - * @param velocityEngine the velocityEngine to set - */ - public void setVelocityEngine(VelocityEngine velocityEngine) { - this.velocityEngine = velocityEngine; - } - - /** - * Static setter used to inject the configured solution center URL. 
- */ - public static void setSolutionsUrl(String solutionsUrl) { - ScmEntryService.solutionsUrl = solutionsUrl; - } - - private RestTemplate restTemplate() { - return new RestTemplate(this.converters); - } - - @Override - public T loadEntry(String id, Class cls) { - logger.debug(String.format("Loading ref-only %s from %s", cls.getName(), id)); - T entry = restTemplate().getForObject(id, cls); - return entry; - } - - @Override - public Problem loadProblem(String id) { - return loadEntry(id, Problem.class); - } - - @Override - public Toolbox loadToolbox(String id) { - return loadEntry(id, Toolbox.class); - } - - @Override - public Solution loadSolution(String id) { - return loadEntry(id, Solution.class); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/ScriptBuilderService.java b/src/main/java/org/auscope/portal/server/web/service/ScriptBuilderService.java deleted file mode 100644 index f43d83963..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/ScriptBuilderService.java +++ /dev/null @@ -1,109 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PrintWriter; -import java.util.Map; - -import org.apache.commons.lang.text.StrSubstitutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.FileStagingService; -import org.auscope.portal.core.util.FileIOUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.web.security.PortalUser; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.stereotype.Service; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.ResponseBody; -import org.springframework.web.bind.annotation.ResponseStatus; - -/** - * A service class for providing functionality for saving 'scripts' against a particular job. 
- * - * @author Josh Vote - * - */ -@Service -public class ScriptBuilderService { - public static final String SCRIPT_FILE_NAME = "vl_script.py"; - - private final Log logger = LogFactory.getLog(getClass()); - - /** For saving our files to a staging area*/ - private FileStagingService jobFileService; - - /** - * Creates a new instance - * @param jobFileService - * @param jobManager - */ - @Autowired - public ScriptBuilderService(FileStagingService jobFileService) { - super(); - this.jobFileService = jobFileService; - } - - /** - * Saves the specified script text as the primary script to be run by the job with specified ID - * @param jobId - * @param scriptText - * @throws PortalServiceException - */ - public void saveScript(VEGLJob job, String scriptText, PortalUser user) throws PortalServiceException { - //Apply text contents to job stage in directory - try (OutputStream scriptFile = jobFileService.writeFile(job, SCRIPT_FILE_NAME)) { - PrintWriter writer = new PrintWriter(scriptFile); - writer.print(scriptText); - writer.close(); - } catch (Exception e) { - logger.error("Couldn't write script file: " + e.getMessage()); - logger.debug("error: ", e); - throw new PortalServiceException("Couldn't write script file for job " + job, e); - } - } - - /** - * Loads the saved VL script source with a specified job ID - * @param jobId - * @return the file contents if the script file exists otherwise an empty string if the script file doesn't exist or is empty. - * @throws PortalServiceException - */ - public String loadScript(VEGLJob job, PortalUser user) throws PortalServiceException { - try (InputStream is = jobFileService.readFile(job, SCRIPT_FILE_NAME)){ - //Load script from VL server's filesystem - - String script = null; - if (is == null) { - logger.warn("User script file does not exist."); - script = ""; - } else { - script = FileIOUtil.convertStreamtoString(is); - } - return script; - } catch (Exception ex) { - logger.error("Error loading script.", ex); - throw new PortalServiceException("There was a problem loading your script.", "Please report this error to cg_admin@csiro.au"); - } - } - - /** - * A string format function supporting named placeholders in the form ${key} - * - * @param templateText the format string/template string to be used for replacement - * @param values The key/value pairs to be used in replacing placeholders in templateText - * @return - */ - public String populateTemplate(String templateText, Map values) { - return StrSubstitutor.replace(templateText, values); - } - - @ExceptionHandler(AccessDeniedException.class) - @ResponseStatus(value = org.springframework.http.HttpStatus.FORBIDDEN) - public @ResponseBody String handleException(AccessDeniedException e) { - return e.getMessage(); - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/service/SolutionResponse.java b/src/main/java/org/auscope/portal/server/web/service/SolutionResponse.java deleted file mode 100644 index 6d9bbf941..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/SolutionResponse.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; - -import org.auscope.portal.server.web.service.scm.Solution; - -/** - * Represents a response to a request for solutions. 
The response is broken into three parts:
- * 1) Solutions that match the query AND have a toolbox image matching a compute provider that a user has configured
- * 2) Solutions that match the query AND DO NOT have a toolbox image matching a compute provider that the user has configured (but match one of the unconfigured compute providers)
- * 3) Solutions that match the query AND DO NOT have a toolbox image matching any known compute provider
- * @author Josh Vote (CSIRO)
- *
- */
-public class SolutionResponse implements Serializable {
-    private static final long serialVersionUID = 4394895058754721560L;
-
-    private List<Solution> configuredSolutions;
-    private List<Solution> unconfiguredSolutions;
-    private List<Solution> otherSolutions;
-
-    public SolutionResponse() {
-        super();
-        configuredSolutions = new ArrayList<>();
-        unconfiguredSolutions = new ArrayList<>();
-        otherSolutions = new ArrayList<>();
-    }
-
-    /**
-     * Solutions that match the query AND have a toolbox image matching a compute provider that a user has configured
-     * @return
-     */
-    public List<Solution> getConfiguredSolutions() {
-        return configuredSolutions;
-    }
-
-    /**
-     * Solutions that match the query AND DO NOT have a toolbox image matching a compute provider that the user has configured (but match one of the unconfigured compute providers)
-     * @return
-     */
-    public List<Solution> getUnconfiguredSolutions() {
-        return unconfiguredSolutions;
-    }
-
-    /**
-     * Solutions that match the query AND DO NOT have a toolbox image matching any known compute provider
-     * @return
-     */
-    public List<Solution> getOtherSolutions() {
-        return otherSolutions;
-    }
-
-
-}
diff --git a/src/main/java/org/auscope/portal/server/web/service/TemplateLintService.java b/src/main/java/org/auscope/portal/server/web/service/TemplateLintService.java
deleted file mode 100644
index ae6967499..000000000
--- a/src/main/java/org/auscope/portal/server/web/service/TemplateLintService.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * This file is part of the Virtual Geophysics Laboratory (VGL) project.
- * Copyright (c) 2016, CSIRO
- *
- * Licensed under the terms of the GNU Lesser General Public License.
- */
-package org.auscope.portal.server.web.service;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.InputStreamReader;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-
-import javax.annotation.Resource;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.auscope.portal.core.services.PortalServiceException;
-import org.springframework.stereotype.Service;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-/**
- * Checks a template script for errors or other issues.
- *
- * @author Geoff Squire
- */
-@Service
-public class TemplateLintService {
-    private final Log logger = LogFactory.getLog(getClass());
-
-    /**
-     * Enumerate template languages we support.
-     */
-    public enum TemplateLanguage { PYTHON3, PYTHON2 }
-
-    /**
-     * Default time to wait for lint process to complete in seconds.
-     *
-     * TODO: Make this configurable.
-     */
-    public static final long DEFAULT_TIMEOUT = 5l;
-
-    @Resource(name="pylintCommand")
-    private List<String> pylintCommand;
-
-    /**
-     * Create a new instance.
- */ - public TemplateLintService() { - super(); - } - - /** - * Check a template script code and return any errors or other issues found. - * - * @param template String containing template code - * @param language TemplateLanguage language used for template - * @return List containing errors/issues found, if any - */ - public List checkTemplate(String template, TemplateLanguage language) - throws PortalServiceException - { - List lints = null; - - switch (language) { - case PYTHON3: - // fall through - // TODO: support py2/3 environments - case PYTHON2: - lints = pylint(template); - break; - default: - throw new PortalServiceException(String.format("Unsupported template language ({})", - language)); - } - - return lints; - } - - private List pylint(String template) - throws PortalServiceException - { - List lints = new ArrayList(); - - // Save template as a temporary python file - Path f; - try { - f = Files.createTempFile("contemplate", ".py"); - BufferedWriter writer = Files.newBufferedWriter(f); - writer.write(template); - writer.close(); - } - catch (Exception ex) { - throw new PortalServiceException("Failed to write template to temporary file.", ex); - } - - // Run pylint in the temp file's directory - String results; - String errors; - ArrayList cmd = new ArrayList(this.pylintCommand); - cmd.add(f.getFileName().toString()); - try { - ProcessBuilder pb = - new ProcessBuilder(cmd).directory(f.getParent().toFile()); - - // Start the process, and consume the results immediately so Windows is happy. - Process p = pb.start(); - BufferedReader stdout = - new BufferedReader(new InputStreamReader(p.getInputStream())); - results = stdout.lines().collect(Collectors.joining("\n")); - BufferedReader stderr = - new BufferedReader(new InputStreamReader(p.getErrorStream())); - errors = stderr.lines().collect(Collectors.joining("\n")); - - if (!p.waitFor(DEFAULT_TIMEOUT, TimeUnit.SECONDS)) { - // Timed out - throw new PortalServiceException(String.format("pylint process failed to complete before {} second timeout elapsed", DEFAULT_TIMEOUT)); - } - - // Finished successfully? pylint returns 0 on success *with no - // issues*, 1 on failure to run properly, 2/4/8/16 for successful - // completion with python convention/refactor/warning/error (codes - // 2-16 bit-ORd into final result) or 32 on usage error. - int rv = p.exitValue(); - if (rv == 1 || rv == 32) { - logger.error("pylint failed"); - logger.debug("\npylint stderr:\n" + errors); - logger.debug("\npylint stdout:\n" + results); - throw new PortalServiceException(String.format("pylint process returned non-zero exit value: {}", rv)); - } - else if (rv != 0) { - logger.info("pylint found issues"); - } - } - catch (PortalServiceException pse) { - throw pse; - } - catch (Exception ex) { - throw new PortalServiceException("Failed to run pylint on template", ex); - } - - // Parse results into LintResult objects - lints = parsePylintResults(results); - - // Clean up - try { - Files.delete(f); - } - catch (Exception ex) { - throw new PortalServiceException("Failed to delete temporary template file.", ex); - } - - return lints; - } - - /** - * Parse pylint results into LintResult objects. 
- * - * @param input InputStream with results text - * @return List with issues - */ - private List parsePylintResults(String input) - throws PortalServiceException - { - List lints = new ArrayList(); - ObjectMapper mapper = new ObjectMapper(); - JsonNode root = null; - - if (!input.trim().isEmpty()) { - try { - root = mapper.readTree(input); - } - catch (Exception ex) { - throw new PortalServiceException("Failed to parse pylint result json", ex); - } - if (root == null) { - throw new PortalServiceException("No JSON content found in pylint results"); - } - else if (!root.isArray()) { - throw new PortalServiceException - (String.format("Unsupported pylint results: {}", - root.toString())); - } - - // Parsed json, so extract LintResult objects - for (JsonNode result: root) { - LintResult.Severity severity = - result.get("type").asText().equals("error") - ? LintResult.Severity.ERROR - : LintResult.Severity.WARNING; - lints.add( - new LintResult(severity, - result.get("message").asText(), - - // pylint returns 1-based line count. - new LintResult.Location( - result.get("line").asInt() - 1, - result.get("column").asInt()) - ) - ); - } - } - - return lints; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/VEGLJobService.java b/src/main/java/org/auscope/portal/server/web/service/VEGLJobService.java deleted file mode 100644 index b8513d89b..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/VEGLJobService.java +++ /dev/null @@ -1,120 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.util.List; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.web.repositories.VEGLJobRepository; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.stereotype.Service; - -@Service -public class VEGLJobService { - - @Autowired - private VEGLJobRepository jobRepository; - - - /** - * Retrieves jobs that are grouped under given series. - * It excludes jobs that are deleted. - * - * @param seriesID the ID of the series - * @param user - */ - public List getJobsOfSeries(final int seriesId, PortalUser user) { - List res = jobRepository.findBySeriesIdAndEmail(seriesId, user.getEmail()); - for (VEGLJob job : res) { - job.setProperty(CloudJob.PROPERTY_STS_ARN, user.getArnExecution()); - job.setProperty(CloudJob.PROPERTY_CLIENT_SECRET, user.getAwsSecret()); - job.setProperty(CloudJob.PROPERTY_S3_ROLE, user.getArnStorage()); - } - return res; - } - - /** - * Retrieves jobs that are grouped under a given user. - * It excludes jobs that are deleted. - * - * @param user - * @return - */ - public List getJobsOfUser(PortalUser user) { - List res = jobRepository.findByEmail(user.getEmail()); - for (VEGLJob job : res) { - job.setProperty(CloudJob.PROPERTY_STS_ARN, user.getArnExecution()); - job.setProperty(CloudJob.PROPERTY_CLIENT_SECRET, user.getAwsSecret()); - job.setProperty(CloudJob.PROPERTY_S3_ROLE, user.getArnStorage()); - } - - return res; - } - - /** - * Retrieves jobs that are either pending or active. - * - * !!! Does not check authorization. Internal use only. Never expose to web end point. - * @return a list of pending or active jobs. 
-     */
-    public List<VEGLJob> getPendingOrActiveJobs() {
-        return jobRepository.findPendingOrActiveJobs();
-    }
-
-    /**
-     * Retrieves jobs that are currently in the queue.
-     *
-     * !!! Does not check authorization. Internal use only. Never expose to a web end point.
-     * @return a list of in-queue jobs.
-     */
-    public List<VEGLJob> getInQueueJobs() {
-        return jobRepository.findInqueueJobs();
-    }
-
-    /**
-     * Retrieves the job with the given ID.
-     * @param user the currently logged in user
-     */
-    public VEGLJob get(final int id, PortalUser user) {
-        VEGLJob job = jobRepository.findById(id).orElse(null);
-        if(job != null) {
-            job.setProperty(CloudJob.PROPERTY_STS_ARN, user.getArnExecution());
-            job.setProperty(CloudJob.PROPERTY_CLIENT_SECRET, user.getAwsSecret());
-            job.setProperty(CloudJob.PROPERTY_S3_ROLE, user.getArnStorage());
-            if( job.getEmailAddress() == null || user.getEmail()==null || (!job.getEmailAddress().trim().equalsIgnoreCase(user.getEmail().trim()) ))
-                throw new AccessDeniedException("User does not have access to the requested job");
-        }
-        return job;
-    }
-
-    /**
-     * Deletes the given job.
-     */
-    public void deleteJob(final VEGLJob job) {
-        jobRepository.delete(job);
-    }
-
-    /**
-     * Saves or updates the given job.
-     */
-    public void saveJob(final VEGLJob job) {
-        jobRepository.save(job);
-    }
-
-    public VEGLJob get(int id, String stsArn, String clientSecret, String s3Role, String userEmail, String nciUser, String nciProj, String nciKey) {
-        VEGLJob job = jobRepository.findById(id).orElse(null);
-        if(job != null) {
-            if( job.getEmailAddress() == null || userEmail==null || (!job.getEmailAddress().trim().equalsIgnoreCase(userEmail.trim()) ))
-                throw new AccessDeniedException("User does not have access to the requested job");
-            job.setProperty(CloudJob.PROPERTY_STS_ARN, stsArn);
-            job.setProperty(CloudJob.PROPERTY_CLIENT_SECRET, clientSecret);
-            job.setProperty(CloudJob.PROPERTY_S3_ROLE, s3Role);
-            job.setProperty(NCIDetails.PROPERTY_NCI_USER, nciUser);
-            job.setProperty(NCIDetails.PROPERTY_NCI_PROJECT, nciProj);
-            job.setProperty(NCIDetails.PROPERTY_NCI_KEY, nciKey);
-        }
-        return job;
-    }
-
-}
diff --git a/src/main/java/org/auscope/portal/server/web/service/VEGLSeriesService.java b/src/main/java/org/auscope/portal/server/web/service/VEGLSeriesService.java
deleted file mode 100644
index 23d29e22d..000000000
--- a/src/main/java/org/auscope/portal/server/web/service/VEGLSeriesService.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package org.auscope.portal.server.web.service;
-
-import java.util.List;
-import org.apache.commons.lang.StringUtils;
-import org.auscope.portal.server.vegl.VEGLSeries;
-import org.auscope.portal.server.web.repositories.VEGLSeriesRepository;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.data.domain.Example;
-import org.springframework.security.access.AccessDeniedException;
-import org.springframework.stereotype.Service;
-
-@Service
-public class VEGLSeriesService {
-
-    @Autowired
-    private VEGLSeriesRepository seriesRepository;
-
-
-    /**
-     * Queries for series matching the given criteria. Some but not all of
-     * the parameters may be null.
- */ - public List query(final String user, final String name, - final String desc) { - VEGLSeries series = new VEGLSeries(); - if (StringUtils.isNotEmpty(user)) { - series.setUser(user); - } else { - series.setUser(null); - } - if (StringUtils.isNotEmpty(name)) { - series.setName(name); - } else { - series.setName(null); - } - if (StringUtils.isNotEmpty(desc)) { - series.setDescription(desc); - } else { - series.setDescription(null); - } - Example example = Example.of(series); - return seriesRepository.findAll(example); - } - - /** - * Retrieves the series with given ID. - * @param user - */ - public VEGLSeries get(final int id, String userEmail) { - VEGLSeries res = seriesRepository.findById(id).orElse(null); - if( (res!=null) && (! res.getUser().equalsIgnoreCase(userEmail))) { - throw new AccessDeniedException("User not authorized to access series: "+id); - } - return res; - } - - /** - * Saves or updates the given series. - */ - public void save(final VEGLSeries series) { - seriesRepository.save(series); - } - - /** - * Delete the given series. - */ - public void delete(final VEGLSeries series) { - seriesRepository.delete(series); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/VGLCryptoService.java b/src/main/java/org/auscope/portal/server/web/service/VGLCryptoService.java deleted file mode 100644 index e2ac2324d..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/VGLCryptoService.java +++ /dev/null @@ -1,99 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.nio.charset.StandardCharsets; -import java.security.AlgorithmParameters; -import java.security.GeneralSecurityException; -import java.security.SecureRandom; -import java.util.Base64; - -import javax.crypto.Cipher; -import javax.crypto.SecretKey; -import javax.crypto.SecretKeyFactory; -import javax.crypto.spec.IvParameterSpec; -import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; - -import org.auscope.portal.core.services.PortalServiceException; - -/** - * Created by Carsten Friedrich (fri096) for VGL. 
- * - * A Service for basic encryption and decryption - */ - -public class VGLCryptoService { - public static final String SECRET_KEY_SPEC = "AES"; - public static final String CIPHER = "AES/CBC/PKCS5Padding"; - public static final String PASSWORD_BASED_ALGO = "PBKDF2WithHmacSHA1"; - public static final int KEY_SIZE = 128; - public static final int CRYPTO_ITERATIONS = 1024; - - private String encryptionPassword; - - public VGLCryptoService(String encryptionPassword) throws PortalServiceException { - if(encryptionPassword==null || encryptionPassword.isEmpty()) - throw new PortalServiceException("Configuration parameter cloud.encryption.password must not be empty!"); - this.encryptionPassword=encryptionPassword; - } - - public static byte[] generateSalt(int size) { - byte[] res = new byte[size]; - SecureRandom r = new SecureRandom(); - r.nextBytes(res); - return res; - } - - public String decrypt(byte[] data) - throws PortalServiceException { - try { - String cryptoString = new String(data, StandardCharsets.UTF_8); - String[] cyptoInfo = cryptoString.split("@"); - if(cyptoInfo.length!=3) - throw new PortalServiceException("Invalid crypto info: "+cryptoString); - - SecretKeyFactory kf = SecretKeyFactory.getInstance(PASSWORD_BASED_ALGO); - PBEKeySpec keySpec = new PBEKeySpec(encryptionPassword.toCharArray(), Base64.getDecoder().decode(cyptoInfo[0]), - CRYPTO_ITERATIONS, KEY_SIZE); - SecretKey tmp = kf.generateSecret(keySpec); - - byte[] endcoded = tmp.getEncoded(); - SecretKey key = new SecretKeySpec(endcoded, SECRET_KEY_SPEC); - - Cipher ciph = Cipher.getInstance(CIPHER); - - ciph.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(Base64.getDecoder().decode(cyptoInfo[1]))); - return new String(ciph.doFinal(Base64.getDecoder().decode(cyptoInfo[2])), StandardCharsets.UTF_8); - - } catch (GeneralSecurityException e) { - throw new PortalServiceException("Decryption error: " + e.getMessage(), e); - } - } - - public byte[] encrypt(String dataStr) throws PortalServiceException { - try { - byte[] salt = generateSalt(8); - SecretKeyFactory kf = SecretKeyFactory.getInstance(PASSWORD_BASED_ALGO); - - PBEKeySpec keySpec = new PBEKeySpec(encryptionPassword.toCharArray(), salt, CRYPTO_ITERATIONS, - KEY_SIZE); - - SecretKey tmp = kf.generateSecret(keySpec); - SecretKey key = new SecretKeySpec(tmp.getEncoded(), SECRET_KEY_SPEC); - - Cipher ciph = Cipher.getInstance(CIPHER); - - ciph.init(Cipher.ENCRYPT_MODE, key); - AlgorithmParameters params = ciph.getParameters(); - byte[] iv = params.getParameterSpec(IvParameterSpec.class).getIV(); - - byte[] cipherText = ciph.doFinal(dataStr.getBytes(StandardCharsets.UTF_8)); - String resultString = Base64.getEncoder().encodeToString(salt) + "@" + - Base64.getEncoder().encodeToString(iv) + "@" + - Base64.getEncoder().encodeToString(cipherText); - return resultString.getBytes(StandardCharsets.UTF_8); - } catch (GeneralSecurityException e) { - throw new PortalServiceException("Encryption error: " + e.getMessage(), e); - } - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/service/VGLJobAuditLogService.java b/src/main/java/org/auscope/portal/server/web/service/VGLJobAuditLogService.java deleted file mode 100644 index f3086c806..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/VGLJobAuditLogService.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.util.List; - -import org.auscope.portal.server.vegl.VGLJobAuditLog; -import org.auscope.portal.server.web.repositories.VGLJobAuditLogRepository; 
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class VGLJobAuditLogService {
-
-    @Autowired
-    private VGLJobAuditLogRepository jobAuditLogRepository;
-
-
-    public List<VGLJobAuditLog> getAuditLogsOfJob(final int jobId) {
-        return jobAuditLogRepository.findByJobId(jobId);
-    }
-
-
-    /**
-     * Retrieves the audit log entry with the given ID.
-     */
-    public VGLJobAuditLog get(final int id) {
-        return jobAuditLogRepository.findById(id).orElse(null);
-    }
-
-
-    /**
-     * Saves or updates the given audit log entry.
-     */
-    public void save(final VGLJobAuditLog jobAuditLog) {
-        jobAuditLogRepository.save(jobAuditLog);
-    }
-}
diff --git a/src/main/java/org/auscope/portal/server/web/service/VGLPurchaseService.java b/src/main/java/org/auscope/portal/server/web/service/VGLPurchaseService.java
deleted file mode 100644
index 542123a58..000000000
--- a/src/main/java/org/auscope/portal/server/web/service/VGLPurchaseService.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package org.auscope.portal.server.web.service;
-
-import java.util.List;
-
-
-import org.auscope.portal.server.vegl.VGLDataPurchase;
-import org.auscope.portal.server.vegl.VGLJobPurchase;
-import org.auscope.portal.server.web.repositories.VGLDataPurchaseRepository;
-import org.auscope.portal.server.web.repositories.VGLJobPurchaseRepository;
-import org.auscope.portal.server.web.security.PortalUser;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class VGLPurchaseService {
-
-    @Autowired
-    private VGLDataPurchaseRepository dataPurchaseRepository;
-
-    @Autowired
-    private VGLJobPurchaseRepository jobPurchaseRepository;
-
-
-    /**
-     * Retrieves the data purchases for a user
-     * @param user
-     */
-    public List<VGLDataPurchase> getDataPurchasesByUser(final PortalUser user) {
-        return dataPurchaseRepository.findByParentOrderByDateDesc(user);
-    }
-
-
-    /**
-     * Saves a data purchase.
-     */
-    public Integer saveDataPurchase(final VGLDataPurchase purchase) {
-        VGLDataPurchase savedPurchase = dataPurchaseRepository.saveAndFlush(purchase);
-        return savedPurchase.getId();
-    }
-
-    /**
-     * Retrieves the job purchases for a user
-     * @param user
-     */
-    public List<VGLJobPurchase> getJobPurchasesByUser(final PortalUser user) {
-        return jobPurchaseRepository.findByParent(user);
-    }
-
-
-    /**
-     * Saves a job purchase.
- */ - public Integer saveJobPurchase(final VGLJobPurchase purchase) { - VGLJobPurchase savedPurchase = jobPurchaseRepository.saveAndFlush(purchase); - return savedPurchase.getId(); - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/service/cloud/CloudComputeServiceNci.java b/src/main/java/org/auscope/portal/server/web/service/cloud/CloudComputeServiceNci.java deleted file mode 100644 index 558e1a530..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/cloud/CloudComputeServiceNci.java +++ /dev/null @@ -1,364 +0,0 @@ -package org.auscope.portal.server.web.service.cloud; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLDecoder; -import java.nio.charset.StandardCharsets; -import java.text.MessageFormat; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.cloud.ComputeType; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.cloud.SshCloudConnector.ExecResult; - -import com.jcraft.jsch.Session; - -/** - * Service class wrapper for interacting with a remote cloud compute service - * using CloudJob objects. - * - * @author Carsten Friedrich - */ -public class CloudComputeServiceNci extends CloudComputeService { - - /** - * Any getStatus request on a job whose submission time is less than - * STATUS_PENDING_SECONDS seconds away from the current time will be forced - * to return a Pending status (ignoring any status checks) - * - * This is to avoid missing errors occurring when AWS hasn't fully caught up - * to the new VM. - */ - public static final long STATUS_PENDING_SECONDS = 30; - - public static final String JOB_ID_FILE = ".jobid"; - - public static final String ANNOTATION_STORAGE_REQUIRED = "nci-storage-required="; - - @SuppressWarnings("unused") - private final Log logger = LogFactory.getLog(getClass()); - private CloudStorageServiceNci storageService; - private SshCloudConnector sshCloudConnector; - - /** - * Creates a new instance with the specified credentials - * - * @param endpoint - * (URL) The location of the Compute (Nova) service - * @param accessKey - * The Compute Access key (user name) - * @param secretKey - * The Compute Secret key (password) - * @param apiVersion - * The API version - */ - public CloudComputeServiceNci(CloudStorageServiceNci storageService, String endpoint) { - super(ProviderType.GADI, endpoint, null); - this.storageService=storageService; - this.sshCloudConnector = new SshCloudConnector(endpoint); - } - - /** - * We cant rely on the submitted instance ID to be the actual running job ID as our jobs will - * finish and spawn followup jobs on different queues. 
This method will go to the underlying storage
-     * service and retrieve the ID of the latest running job (as reported by the jobs themselves)
-     * @param job
-     * @return
-     * @throws PortalServiceException
-     */
-    private String getJobLastInstanceId(CloudJob job) throws PortalServiceException {
-        try (InputStream is = storageService.getJobFile(job, JOB_ID_FILE)) {
-            if (is == null) {
-                return job.getComputeInstanceId();
-            }
-            return IOUtils.toString(is, StandardCharsets.UTF_8);
-        } catch (IOException e) {
-            throw new PortalServiceException("Unable to access job ID file for " + job.getId(), e);
-        }
-    }
-
-    /**
-     * Loads the bootstrap shell script template as a string.
-     * @return
-     * @throws IOException
-     */
-    private String getNamedResourceString(String name) throws IOException {
-        try (InputStream is = this.getClass().getResourceAsStream(name)) {
-            String template = IOUtils.toString(is, StandardCharsets.UTF_8);
-            return template.replaceAll("\r", ""); // Normalise to Unix style line endings
-        }
-    }
-
-    /**
-     * Takes a walltime in minutes and converts it to a PBS walltime string in the form of HH:MM:SS
-     * @param minutes
-     * @return
-     */
-    private String wallTimeToString(Integer minutes) {
-        if (minutes == null || minutes == 0) {
-            return "01:00:00"; //default to 1 hour
-        }
-        int seconds = minutes * 60;
-
-        return String.format("%1$02d:%2$02d:%3$02d", seconds / 3600, (seconds % 3600) / 60, seconds % 60);
-    }
-
-    /**
-     * Takes an encoding of a compute type in the form of param1=value1&param2=value2 (URL parameter encoding)
-     * and returns the value for the specified param (returns an empty string if it does not exist)
-     * @param param
-     * @param computeType
-     * @return
-     */
-    private String extractParamFromComputeType(String param, String computeType) {
-        String[] parts = computeType.split("&");
-
-        for (String part : parts) {
-            String[] kvp = part.split("=");
-
-            if (kvp[0].equals(param)) {
-                try {
-                    return URLDecoder.decode(kvp[1], "UTF-8");
-                } catch (UnsupportedEncodingException e) {
-                    logger.error("Unable to decode compute type into UTF8", e);
-                }
-            }
-        }
-
-        return "";
-    }
-
-    /**
-     * Retrieve the HPC qsub script fragment from URL stored as the "image" ID.
-     *
-     * @param hpcImageId String URL of the QSub fragment
-     * @return String QSub fragment to set up toolbox deps and environment
-     * @throws PortalServiceException
-     */
-    private String hpcImageToString(String hpcImageId) throws PortalServiceException {
-        try {
-            URL url = new URL(hpcImageId);
-            return IOUtils.toString(url, StandardCharsets.UTF_8);
-        }
-        catch (MalformedURLException mue) {
-            throw new PortalServiceException("Invalid URL used for HPC Toolbox ImageID: " + hpcImageId, mue);
-        }
-        catch (IOException ioe) {
-            throw new PortalServiceException("Failed to retrieve HPC dependencies fragment from ImageID URL: " + hpcImageId, ioe);
-        }
-    }
-
-    /**
-     * Return extra filesystems request string, if any, for job.
-     *
-     * Returns an empty string if none required.
- * - */ - private String extraFilesystemsRequest(VEGLJob job) { - if (job != null) { - for (String annotation: job.getAnnotations()) { - if (annotation.toLowerCase().startsWith(ANNOTATION_STORAGE_REQUIRED)) { - return annotation.substring(ANNOTATION_STORAGE_REQUIRED.length()); - } - } - } - - return ""; - } - - /** - * Creates our bootstrap jobs/files templated for the specified job in the specified job's storage location - * @param job - * @throws PortalServiceException - * @throws IOException - */ - private void initialiseWorkingDirectory(VEGLJob job) throws PortalServiceException, IOException { - String runCommand = job.getComputeVmRunCommand(); - if (StringUtils.isBlank(runCommand)) { - runCommand = "python"; - } - String utilFileContents = getNamedResourceString("nci-util.sh"); - String wallTimeString = wallTimeToString(job.getWalltime()); - String hpcImageFragment = hpcImageToString(job.getComputeVmId()); - String downloadJobContents = MessageFormat.format(getNamedResourceString("nci-download.job.tpl"), new Object[] { - job.getProperty(NCIDetails.PROPERTY_NCI_PROJECT), - job.getId(), - storageService.getWorkingJobDirectory(job), - storageService.getOutputJobDirectory(job), - wallTimeString - }); - - String runJobContents = MessageFormat.format(getNamedResourceString("nci-run.job.tpl"), new Object[] { - job.getProperty(NCIDetails.PROPERTY_NCI_PROJECT), - job.getId(), - storageService.getWorkingJobDirectory(job), - storageService.getOutputJobDirectory(job), - wallTimeString, - extractParamFromComputeType("ncpus", job.getComputeInstanceType()), - extractParamFromComputeType("mem", job.getComputeInstanceType()), - extractParamFromComputeType("jobfs", job.getComputeInstanceType()), - hpcImageFragment, - runCommand, - extraFilesystemsRequest(job) - }); - - //storageService.uploadJobFile(job, files); - storageService.uploadJobFile(job, "nci-util.sh", new ByteArrayInputStream(utilFileContents.getBytes(StandardCharsets.UTF_8))); - storageService.uploadJobFile(job, "nci-download.job", new ByteArrayInputStream(downloadJobContents.getBytes(StandardCharsets.UTF_8))); - storageService.uploadJobFile(job, "nci-run.job", new ByteArrayInputStream(runJobContents.getBytes(StandardCharsets.UTF_8))); - } - - @Override - public String executeJob(CloudJob job, String userDataString) throws PortalServiceException { - String workingDir = storageService.getOutputJobDirectory(job); - Session session= null; - - if (!(job instanceof VEGLJob)) { - throw new PortalServiceException("job must be an instance of VEGLJob"); - } - - try { - session = sshCloudConnector.getSession(job); - - initialiseWorkingDirectory((VEGLJob) job); - ExecResult res = sshCloudConnector.executeCommand(session, "qsub nci-download.job", workingDir); - if(res.getExitStatus() != 0) { - throw new PortalServiceException("Could not submit job file: " + res.getErr()); - } - - return res.getOut(); - } catch (IOException e) { - throw new PortalServiceException("Error executing job " + job.getId(), e); - } finally { - if (session != null) { - session.disconnect(); - } - } - } - - /** - * Makes a request that the VM started by job be terminated - * - * @param job - * The job whose execution should be terminated - * @throws PortalServiceException - */ - @Override - public void terminateJob(CloudJob job) throws PortalServiceException { - Session session= null; - try { - String runningJobId = getJobLastInstanceId(job); - - session = sshCloudConnector.getSession(job); - - ExecResult res = sshCloudConnector.executeCommand(session, "qdel " + runningJobId); 
- if(res.getExitStatus() != 0) { - throw new PortalServiceException("Could not delete job: "+res.getErr()); - } - } finally { - if (session != null) { - session.disconnect(); - } - } - } - - /** - * An array of compute types that are available through this compute service - */ - @Override - public ComputeType[] getAvailableComputeTypes(Integer minimumVCPUs, Integer minimumRamMB, - Integer minimumRootDiskGB) { - return new ComputeType[0]; - } - - /** - * Will attempt to tail and return the last {@code numLines} from the given - * servers console. - * - * @param job - * the job which has been executed by this service - * @param numLines - * the number of console lines to return - * @return console output as string or null - * @return - */ - @Override - public String getConsoleLog(CloudJob job, int numLines) throws PortalServiceException { - - Session session= null; - try { - String runningJobId = getJobLastInstanceId(job); - - session = sshCloudConnector.getSession(job); - ExecResult res = sshCloudConnector.executeCommand(session, "qcat " + runningJobId); - if (res.getExitStatus() != 0) { - if (res.getOut().contains("Job is not running") || - res.getOut().contains("Job has finished")) - return ""; - - throw new PortalServiceException("Could not query job log: "+res.getErr()); - } - - return res.getOut(); - } finally { - if (session != null) { - session.disconnect(); - } - } - } - - /** - * Attempts to lookup low level status information about this job's compute - * instance from the remote cloud. - * - * Having no computeInstanceId set will result in an exception being thrown. - * - * @param job - * @return - * @throws PortalServiceException - */ - @Override - public InstanceStatus getJobStatus(CloudJob job) throws PortalServiceException { - - Session session= null; - try { - String runningJobId = getJobLastInstanceId(job); - - session = sshCloudConnector.getSession(job); - ExecResult res = sshCloudConnector.executeCommand(session, "qstat -s " + runningJobId); - if (res.getExitStatus() != 0) { - String errMsg = res.getErr(); - if (errMsg.contains("Job has finished") || errMsg.contains("Unknown Job Id")) { - return InstanceStatus.Missing; - } - throw new PortalServiceException("Could not query job status for job '"+job.getComputeInstanceId()+"': "+res.getErr()); - } - - if (res.getOut().contains("Job has finished")) { - return InstanceStatus.Missing; - } - - return InstanceStatus.Running; - } finally { - if (session != null) { - session.disconnect(); - } - } - } - - @Override - public ComputeType[] getAvailableComputeTypes(String machineImageId) { - return getAvailableComputeTypes(); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/cloud/CloudStorageServiceNci.java b/src/main/java/org/auscope/portal/server/web/service/cloud/CloudStorageServiceNci.java deleted file mode 100644 index 48d356a5e..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/cloud/CloudStorageServiceNci.java +++ /dev/null @@ -1,339 +0,0 @@ -/** - * - */ -package org.auscope.portal.server.web.service.cloud; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Vector; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudDirectoryInformation; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.cloud.CloudFileOwner; -import 
org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.server.web.security.NCIDetails; - -import com.jcraft.jsch.Channel; -import com.jcraft.jsch.ChannelSftp; -import com.jcraft.jsch.ChannelSftp.LsEntry; -import com.jcraft.jsch.JSchException; -import com.jcraft.jsch.Session; -import com.jcraft.jsch.SftpATTRS; -import com.jcraft.jsch.SftpException; - -/** - * Cloud storage service for reading/writing files to NCI's lustre file system over SSH - * - * - * @author fri096 - * @author Josh Vote (CSIRO) - * - */ -public class CloudStorageServiceNci extends CloudStorageService { - private final Log logger = LogFactory.getLog(getClass()); - - private SshCloudConnector sshCloudConnector; - - public CloudStorageServiceNci(String endpoint, String provider) { - super(endpoint, provider, null); - this.sshCloudConnector = new SshCloudConnector(endpoint); - } - - /** - * Gets the full POSIX path to the working directory of the job (where the downloaded files will be stored) - * @param job - * @return - */ - public String getWorkingJobDirectory(CloudFileOwner job) { - return String.format("/scratch/%1$s/vl-workingdir/%2$s", job.getProperty(NCIDetails.PROPERTY_NCI_PROJECT), jobToBaseKey(job)); - } - - /** - * Gets the full POSIX path to the output directory of the job (where the scripts and output files will be stored) - * @param job - * @return - */ - public String getOutputJobDirectory(CloudFileOwner job) { - return String.format("/g/data/%1$s/vl-jobs/%2$s", job.getProperty(NCIDetails.PROPERTY_NCI_PROJECT), jobToBaseKey(job)); - } - - private boolean jobFileExists(ChannelSftp c, String fullPath) throws PortalServiceException { - try { - return c.ls(fullPath).size() > 0; // JSCH always throws an exception on missing paths. 
This is a catchall in case the behavior changes - } catch (SftpException ex) { - return false; - } - } - - /* - * (non-Javadoc) - * - * @see - * org.auscope.portal.core.services.cloud.CloudStorageService#getJobFile(org - * .auscope.portal.core.cloud.CloudFileOwner, java.lang.String) - */ - @Override - public InputStream getJobFile(CloudFileOwner job, String fileName) throws PortalServiceException { - String fullPath = getOutputJobDirectory(job) + "/" + fileName; - try { - Session session = sshCloudConnector.getSession(job); - Channel channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - if (jobFileExists(c, fullPath)) { - return new SshInputStream(session, c, c.get(fullPath)); - } else { - return null; - } - } catch (JSchException | SftpException e) { - throw new PortalServiceException(e.getMessage(), e); - } - } - - /* - * (non-Javadoc) - * - * @see - * org.auscope.portal.core.services.cloud.CloudStorageService#listJobFiles( - * org.auscope.portal.core.cloud.CloudFileOwner) - */ - @Override - public CloudFileInformation[] listJobFiles(CloudFileOwner job) throws PortalServiceException { - String fullPath = getOutputJobDirectory(job); - Session session = null; - Channel channel = null; - try { - session = sshCloudConnector.getSession(job); - channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - Vector files = c.ls(fullPath); - ArrayList res = new ArrayList<>(files.size()); - for (LsEntry entry : files) { - String fileName = entry.getFilename(); - if (fileName.startsWith(".")) { - continue; - } - res.add(new CloudFileInformation(entry.getFilename(), entry.getAttrs().getSize(), null)); - } - return res.toArray(new CloudFileInformation[0]); - } catch (JSchException | SftpException e) { - throw new PortalServiceException("Error listing job " + job.getId() + " files at " + fullPath , e); - } finally { - if (channel != null) { - channel.disconnect(); - } - if (session != null) { - session.disconnect(); - } - } - } - - @Override - public CloudDirectoryInformation listJobDirectoriesAndFiles(CloudFileOwner job, CloudDirectoryInformation cloudDirectory) throws PortalServiceException { - String fullPath = getOutputJobDirectory(job); - if(cloudDirectory != null) { - fullPath = fullPath + "/" + cloudDirectory.getPath(); - } - Session session = null; - Channel channel = null; - try { - session = sshCloudConnector.getSession(job); - channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - Vector files = c.ls(fullPath); - ArrayList res = new ArrayList<>(files.size()); - for (LsEntry entry : files) { - String fileName = entry.getFilename(); - if (fileName.startsWith(".")) { - continue; - } - SftpATTRS attribs = entry.getAttrs(); - if (attribs.isDir()) { - CloudDirectoryInformation childDirectory = new CloudDirectoryInformation(fileName, cloudDirectory); - cloudDirectory.addDirectory(listJobDirectoriesAndFiles(job, childDirectory)); - } else { - cloudDirectory.addFile(new CloudFileInformation(entry.getFilename(), entry.getAttrs().getSize(), null)); - } - } - return cloudDirectory; - } catch (JSchException | SftpException e) { - throw new PortalServiceException("Error listing job " + job.getId() + " files at " + fullPath , e); - } finally { - if (channel != null) { - channel.disconnect(); - } - if (session != null) { - session.disconnect(); - } - } - } - - public void rmDirRecursive(Session session, ChannelSftp channel, String path) throws SftpException { - Vector 
files = channel.ls(path); - - for (LsEntry entry : files) { - String filename = entry.getFilename(); - if(filename.equals(".")||filename.equals("..")) { - continue; - } - - String entryFullPath = path+"/"+filename; - logger.debug("Deleting: "+entryFullPath); - if (entry.getAttrs().isDir()) { - rmDirRecursive(session, channel, entry.getLongname()); - channel.rmdir(entryFullPath); - } else { - channel.rm(entryFullPath); - } - } - - channel.rmdir(path); - } - - /* - * (non-Javadoc) - * - * @see - * org.auscope.portal.core.services.cloud.CloudStorageService#deleteJobFiles - * (org.auscope.portal.core.cloud.CloudFileOwner) - */ - @Override - public void deleteJobFiles(CloudFileOwner job) throws PortalServiceException { - String fullPath = getOutputJobDirectory(job); - Session session = null; - Channel channel = null; - - try { - session = sshCloudConnector.getSession(job); - channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - - rmDirRecursive(session, c, fullPath); - } catch (JSchException | SftpException e) { - throw new PortalServiceException(e.getMessage(), e); - } finally { - if (session != null) { - session.disconnect(); - } - if (channel != null) { - channel.disconnect(); - } - } - } - - /* - * (non-Javadoc) - * - * @see org.auscope.portal.core.services.cloud.CloudStorageService# - * getJobFileMetadata(org.auscope.portal.core.cloud.CloudFileOwner, - * java.lang.String) - */ - @Override - public CloudFileInformation getJobFileMetadata(CloudFileOwner job, String fileName) throws PortalServiceException { - String fullPath = getOutputJobDirectory(job)+"/"+fileName; - Session session = null; - Channel channel = null; - try { - session = sshCloudConnector.getSession(job); - channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - SftpATTRS attr = c.lstat(fullPath); - return new CloudFileInformation(fileName, attr.getSize(), null); - } catch (JSchException | SftpException e) { - throw new PortalServiceException(e.getMessage(), e); - } finally { - if (channel != null) { - channel.disconnect(); - } - if (session != null) { - session.disconnect(); - } - } - } - - /* - * (non-Javadoc) - * - * @see - * org.auscope.portal.core.services.cloud.CloudStorageService#uploadJobFiles - * (org.auscope.portal.core.cloud.CloudFileOwner, java.io.File[]) - */ - @Override - public void uploadJobFiles(CloudFileOwner job, File[] files) throws PortalServiceException { - String fullPath = getOutputJobDirectory(job); - Session session = null; - Channel channel = null; - - try { - session = sshCloudConnector.getSession(job); - sshCloudConnector.createDirectory(session, fullPath); - - channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - c.cd(fullPath); - - for (File file : files) { - try (InputStream in= new FileInputStream(file)) { - c.put(in, file.getName()); - } catch (IOException e) { - throw new PortalServiceException(e.getMessage(),e); - } - } - } catch (JSchException | SftpException e) { - throw new PortalServiceException(e.getMessage(), e); - } finally { - if (session != null) { - session.disconnect(); - } - if (channel != null) { - channel.disconnect(); - } - } - } - - /* - * (non-Javadoc) - * - * @see - * org.auscope.portal.core.services.cloud.CloudStorageService#uploadJobFiles - * (org.auscope.portal.core.cloud.CloudFileOwner, java.io.File[]) - */ - @Override - public void uploadJobFile(CloudFileOwner job, String name, InputStream data) throws PortalServiceException 
{ - String fullPath = getOutputJobDirectory(job); - Session session = null; - Channel channel = null; - - try { - session = sshCloudConnector.getSession(job); - sshCloudConnector.createDirectory(session, fullPath); - - channel = session.openChannel("sftp"); - channel.connect(); - ChannelSftp c = (ChannelSftp) channel; - c.cd(fullPath); - - c.put(data, name); - } catch (JSchException | SftpException e) { - throw new PortalServiceException(e.getMessage(), e); - } finally { - if (session != null) { - session.disconnect(); - } - if (channel != null) { - channel.disconnect(); - } - } - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/cloud/IdentityString.java b/src/main/java/org/auscope/portal/server/web/service/cloud/IdentityString.java deleted file mode 100644 index 2809edbf8..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/cloud/IdentityString.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * - */ -package org.auscope.portal.server.web.service.cloud; - -import com.jcraft.jsch.Identity; -import com.jcraft.jsch.JSch; -import com.jcraft.jsch.JSchException; -import com.jcraft.jsch.KeyPair; - -/** - * @author fri096 - * - */ -public class IdentityString implements Identity { - - private KeyPair kpair; - - public IdentityString(JSch jsch, String prvkey) throws JSchException { - kpair = KeyPair.load(jsch, prvkey.getBytes(), null); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#setPassphrase(byte[]) - */ - @Override - public boolean setPassphrase(byte[] passphrase) throws JSchException { - return kpair.decrypt(passphrase); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#getPublicKeyBlob() - */ - @Override - public byte[] getPublicKeyBlob() { - return kpair.getPublicKeyBlob(); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#getSignature(byte[]) - */ - @Override - public byte[] getSignature(byte[] data) { - return kpair.getSignature(data); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#decrypt() - */ - @Override - public boolean decrypt() { - throw new RuntimeException("not implemented"); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#getAlgName() - */ - @Override - public String getAlgName() { - return "ssh-rsa"; -// return new String(kpair.getKeyTypeName()); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#getName() - */ - @Override - public String getName() { - // TODO Auto-generated method stub - return null; - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#isEncrypted() - */ - @Override - public boolean isEncrypted() { - return kpair.isEncrypted(); - } - - /* (non-Javadoc) - * @see com.jcraft.jsch.Identity#clear() - */ - @Override - public void clear() { - kpair.dispose(); - kpair = null; - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/service/cloud/SshCloudConnector.java b/src/main/java/org/auscope/portal/server/web/service/cloud/SshCloudConnector.java deleted file mode 100644 index d3a4d0119..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/cloud/SshCloudConnector.java +++ /dev/null @@ -1,272 +0,0 @@ -/** - * - */ -package org.auscope.portal.server.web.service.cloud; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudFileOwner; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.server.web.security.NCIDetails; - -import 
com.jcraft.jsch.Channel; -import com.jcraft.jsch.ChannelExec; -import com.jcraft.jsch.JSch; -import com.jcraft.jsch.JSchException; -import com.jcraft.jsch.Session; - -/** - * @author fri096 - * - */ -public class SshCloudConnector { - private final Log logger = LogFactory.getLog(getClass()); - - private String endPoint; - - public SshCloudConnector(String endPoint) { - this.endPoint= endPoint; - } - - public class ExecResult { - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "ExecResult [out=" + out + ", err=" + err + ", exitStatus=" + exitStatus + "]"; - } - - private String out; - - public ExecResult(String out, String err, int exitStatus) { - super(); - this.out = out; - this.err = err; - this.exitStatus = exitStatus; - } - - /** - * @return the out - */ - public String getOut() { - return out; - } - - /** - * @param out - * the out to set - */ - public void setOut(String out) { - this.out = out; - } - - /** - * @return the err - */ - public String getErr() { - return err; - } - - /** - * @param err - * the err to set - */ - public void setErr(String err) { - this.err = err; - } - - /** - * @return the exitStatus - */ - public int getExitStatus() { - return exitStatus; - } - - /** - * @param exitStatus - * the exitStatus to set - */ - public void setExitStatus(int exitStatus) { - this.exitStatus = exitStatus; - } - - private String err; - private int exitStatus; - } - - String readStream(InputStream in, Channel channel) throws IOException, InterruptedException { - StringBuilder res = new StringBuilder(); - byte[] tmp = new byte[1024]; - while (true) { - while (in.available() > 0) { - int i = in.read(tmp, 0, 1024); - if (i < 0) { - break; - } - res.append(new String(tmp, 0, i)); - } - if (channel.isClosed()) { - if (in.available() > 0) { - continue; - } - break; - } - Thread.sleep(1000); - } - return res.toString(); - } - - public ExecResult executeCommand(Session session, String command) throws PortalServiceException { - return executeCommand(session, command, null); - } - - /** - * Retrieves the session for the specified Job. 
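
For context on what the removed SshCloudConnector.getSession() did: it built a JSch session from an in-memory private key, disabled strict host-key checking, and connected to the endpoint on port 22. Below is a minimal, self-contained sketch of that flow, assuming com.jcraft.jsch:jsch 0.1.55 (which this change also drops from the pom) and using JSch's byte-array addIdentity overload in place of the custom IdentityString wrapper; the host, user, and key-path arguments are placeholders, not the portal's configuration.

import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class SshSessionSketch {

    /** Opens a key-based SSH session, mirroring the flow of the removed getSession(). */
    public static Session openSession(String host, String user, String privateKeyPem)
            throws JSchException {
        JSch jsch = new JSch();
        // The removed code wrapped the key in a custom Identity (IdentityString);
        // the byte-array addIdentity overload loads it from memory directly.
        jsch.addIdentity("in-memory-key",
                privateKeyPem.getBytes(StandardCharsets.UTF_8), null, null);
        Session session = jsch.getSession(user, host, 22);
        // The removed connector also disabled strict host-key checking.
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect();
        return session;
    }

    public static void main(String[] args) throws JSchException, IOException {
        // args: <host> <user> <path-to-private-key>  -- placeholders for illustration
        String pem = new String(Files.readAllBytes(Paths.get(args[2])), StandardCharsets.UTF_8);
        Session session = openSession(args[0], args[1], pem);
        ChannelSftp sftp = (ChannelSftp) session.openChannel("sftp");
        sftp.connect();
        // ... transfer or delete files here, as the removed uploadJobFiles()/deleteJobFiles() did ...
        sftp.disconnect();
        session.disconnect();
    }
}
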
- * @param job - * @return - * @throws PortalServiceException - */ - public Session getSession(CloudFileOwner job) throws PortalServiceException { - try { - JSch jsch = new JSch(); - String prvkey = job.getProperty(NCIDetails.PROPERTY_NCI_KEY); - jsch.addIdentity(new IdentityString(jsch, prvkey), null); - String userName = job.getProperty(NCIDetails.PROPERTY_NCI_USER); - Session session = jsch.getSession(userName, endPoint, 22); - session.setConfig("StrictHostKeyChecking", "no"); - if (!session.isConnected()) { - session.connect(); - } - - return session; - } catch (JSchException ex) { - logger.error("Unable to retrieve SSH session for job " + job.getId() + ":" + ex.getMessage()); - logger.debug("Exception:", ex); - throw new PortalServiceException("Unable to retrieve SSH session for job " + job.getId(), ex); - } - - } - - public ExecResult executeCommand(Session session, String command, String workingDir) throws PortalServiceException { - ChannelExec channel = null; - if(workingDir!=null) { - command = "cd "+workingDir+"; "+command; - } - - try { - channel = (ChannelExec) session.openChannel("exec"); - channel.setCommand(command); - channel.setInputStream(null); - channel.setErrStream(null); - - channel.connect(); - - try (InputStream out = channel.getInputStream(); - InputStream err = channel.getErrStream()) { - String outStr = readStream(out, channel); - String errStr = readStream(err, channel); - return new ExecResult(outStr, errStr, channel.getExitStatus()); - } catch (IOException | InterruptedException e) { - throw new PortalServiceException(e.getMessage(), e); - } - } catch (JSchException e) { - throw new PortalServiceException(e.getMessage(), e); - } finally { - if (channel != null) { - channel.disconnect(); - } - } - } - - public void createDirectory(Session session, String dirName) throws PortalServiceException { - String command = "umask 002; mkdir -m 770 -p " + dirName; - ExecResult res = executeCommand(session, command); - if (res.getExitStatus() > 0) { - throw new PortalServiceException("command '" + command + "' returned status" + res.getExitStatus() + " : stderr: " + res.getErr()); - } - } - - - static int checkAck(InputStream in) throws IOException, PortalServiceException { - int b = in.read(); - // b may be 0 for success, - // 1 for error, - // 2 for fatal error, - // -1 - if (b == 0) - return b; - if (b == -1) - return b; - - if (b == 1 || b == 2) { - StringBuilder sb = new StringBuilder(); - int c; - do { - c = in.read(); - sb.append((char) c); - } while (c != '\n'); - if (b == 1) { // error - throw new PortalServiceException("SSH ACK error: " + sb.toString()); - } - if (b == 2) { // fatal error - throw new PortalServiceException("SSH ACK fatal error: " + sb.toString()); - } - } - return b; - } - - void scpStringToFile(Session session, String workingDir, String fileName, String userDataString) throws PortalServiceException { - String command = "scp -t " + workingDir+"/"+fileName; - - ChannelExec channel; - try { - channel = (ChannelExec) session.openChannel("exec"); - } catch (JSchException e1) { - throw new PortalServiceException(e1.getMessage(), e1); - } - channel.setCommand(command); - - // get I/O streams for remote scp - try (OutputStream out = channel.getOutputStream(); InputStream in = channel.getInputStream()) { - channel.connect(); - - checkAck(in); - - byte[] userData = userDataString.getBytes("UTF-8"); - - // send "C0644 filesize filename", where filename should not include '/' - long filesize = userData.length; - command = "C0644 " + filesize + " " + 
fileName; - // if (lfile.lastIndexOf('/') > 0) { - // command += lfile.substring(lfile.lastIndexOf('/') + 1); - // } else { - // command += lfile; - // } - command += "\n"; - out.write(command.getBytes()); - out.flush(); - - checkAck(in); - - out.write(userData); - out.write(0); - - out.flush(); - checkAck(in); - out.close(); - - } catch (IOException | JSchException e) { - throw new PortalServiceException(e.getMessage(), e); - } - - channel.disconnect(); - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/service/cloud/SshInputStream.java b/src/main/java/org/auscope/portal/server/web/service/cloud/SshInputStream.java deleted file mode 100644 index c8f42084d..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/cloud/SshInputStream.java +++ /dev/null @@ -1,130 +0,0 @@ -/** - * - */ -package org.auscope.portal.server.web.service.cloud; - -import java.io.IOException; -import java.io.InputStream; - -import com.jcraft.jsch.ChannelSftp; -import com.jcraft.jsch.Session; - -/** - * @author fri096 - * - */ -public class SshInputStream extends InputStream { - - private Session session; - private ChannelSftp channel; - private InputStream inputStream; - - /** - * @return - * @throws IOException - * @see java.io.InputStream#available() - */ - @Override - public int available() throws IOException { - return inputStream.available(); - } - - /** - * @throws IOException - * @see java.io.InputStream#close() - */ - @Override - public void close() throws IOException { - inputStream.close(); - channel.disconnect(); - session.disconnect(); - } - - /** - * @param arg0 - * @see java.io.InputStream#mark(int) - */ - @Override - public synchronized void mark(int arg0) { - inputStream.mark(arg0); - } - - /** - * @return - * @see java.io.InputStream#markSupported() - */ - @Override - public boolean markSupported() { - return inputStream.markSupported(); - } - - /** - * @param arg0 - * @param arg1 - * @param arg2 - * @return - * @throws IOException - * @see java.io.InputStream#read(byte[], int, int) - */ - @Override - public int read(byte[] arg0, int arg1, int arg2) throws IOException { - return inputStream.read(arg0, arg1, arg2); - } - - /** - * @param arg0 - * @return - * @throws IOException - * @see java.io.InputStream#read(byte[]) - */ - @Override - public int read(byte[] arg0) throws IOException { - return inputStream.read(arg0); - } - - /** - * @throws IOException - * @see java.io.InputStream#reset() - */ - @Override - public synchronized void reset() throws IOException { - inputStream.reset(); - } - - /** - * @param arg0 - * @return - * @throws IOException - * @see java.io.InputStream#skip(long) - */ - @Override - public long skip(long arg0) throws IOException { - return inputStream.skip(arg0); - } - - /** - * @return - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return inputStream.toString(); - } - - public SshInputStream(Session session, ChannelSftp channel, InputStream inputStream) { - if(session!=null) - this.session=session; - if(channel!=null) - this.channel=channel; - this.inputStream=inputStream; - } - - /* (non-Javadoc) - * @see java.io.InputStream#read() - */ - @Override - public int read() throws IOException { - return inputStream.read(); - } - -} diff --git a/src/main/java/org/auscope/portal/server/web/service/monitor/VGLJobStatusChangeHandler.java b/src/main/java/org/auscope/portal/server/web/service/monitor/VGLJobStatusChangeHandler.java deleted file mode 100644 index 2e4b3320a..000000000 --- 
a/src/main/java/org/auscope/portal/server/web/service/monitor/VGLJobStatusChangeHandler.java +++ /dev/null @@ -1,108 +0,0 @@ -package org.auscope.portal.server.web.service.monitor; - -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.services.cloud.monitor.JobStatusChangeListener; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.auscope.portal.server.vegl.mail.JobMailSender; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.service.ANVGLProvenanceService; - -/** - * A handler that provides the concrete implementation of - * JobStatusChangeListener. - * - * It uses VEGLJobManager to update job status and to create job - * audit trail record. In addition, it uses JobMailSender to - * send out email notification upon job processing. - * - * @author Richard Goh - */ -public class VGLJobStatusChangeHandler implements JobStatusChangeListener { - private final Log LOG = LogFactory.getLog(getClass()); - - private VEGLJobManager jobManager; - private JobMailSender jobMailSender; - private VGLJobStatusAndLogReader jobStatusLogReader; - private ANVGLProvenanceService anvglProvenanceService; - - public VGLJobStatusChangeHandler(VEGLJobManager jobManager, - JobMailSender jobMailSender, VGLJobStatusAndLogReader jobStatusLogReader, - ANVGLProvenanceService anvglProvenanceService) { - this.jobManager = jobManager; - this.jobMailSender = jobMailSender; - this.jobStatusLogReader = jobStatusLogReader; - this.anvglProvenanceService = anvglProvenanceService; - } - - public VGLJobStatusAndLogReader getJobStatusLogReader() { - return jobStatusLogReader; - } - - @Override - public void handleStatusChange(CloudJob job, String newStatus, String oldStatus) { - if (!newStatus.equals(JobBuilderController.STATUS_UNSUBMITTED)) { - VEGLJob vglJob = (VEGLJob)job; - vglJob.setProcessDate(new Date()); - try { - this.setProcessDuration(vglJob,newStatus); - } catch (Throwable ex) { - LOG.debug("Unable to set process duration for" + job, ex); - } - vglJob.setStatus(newStatus); - // Execution time, only accurate to 5 minutes and may not be set - // for short jobs so will be set later from the job log - if(newStatus.equals(JobBuilderController.STATUS_PENDING) || - newStatus.equals(JobBuilderController.STATUS_ACTIVE)) - vglJob.setExecuteDate(new Date()); - jobManager.saveJob(vglJob); - jobManager.createJobAuditTrail(oldStatus, vglJob, "Job status updated."); - - //VT: only status done we email here. Any error notification are mailed not by polling but - //when the job has it status set to error; - if ((newStatus.equals(JobBuilderController.STATUS_DONE) && vglJob.getEmailNotification()) || - newStatus.equals(JobBuilderController.STATUS_ERROR) || - newStatus.equals(JobBuilderController.STATUS_WALLTIME_EXCEEDED)) { - jobMailSender.sendMail(vglJob); - LOG.trace("Job completion email notification sent. 
Job id: " + vglJob.getId()); - } - // Job successfully completed - if(newStatus.equals(JobBuilderController.STATUS_DONE)) { - // Provenance - String reportUrl = anvglProvenanceService.createEntitiesForOutputs(vglJob); - if(!reportUrl.equals("")) { - vglJob.setPromsReportUrl(reportUrl); - } - // Get job execution date/time from log - String execDateLog = jobStatusLogReader.getSectionedLog(vglJob, "Execute"); - if(execDateLog != null) { - execDateLog = execDateLog.trim(); - SimpleDateFormat formatter = new SimpleDateFormat("dd/MM/yyyy'T'hh:mm:ss"); - try { - Date d = formatter.parse(execDateLog); - vglJob.setExecuteDate(d); - } catch(ParseException pe) { - LOG.warn("Unable to read job execution date from log file"); - } - } - jobManager.saveJob(vglJob); - } - } - } - - public void setProcessDuration(VEGLJob job,String newStatus){ - if (newStatus.equals(JobBuilderController.STATUS_DONE) || - newStatus.equals(JobBuilderController.STATUS_ERROR) || - newStatus.equals(JobBuilderController.STATUS_WALLTIME_EXCEEDED)){ - String time = this.jobStatusLogReader.getSectionedLog(job, "Time"); - job.setProcessTimeLog(time); - } - } -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/web/service/monitor/VGLJobStatusMonitor.java b/src/main/java/org/auscope/portal/server/web/service/monitor/VGLJobStatusMonitor.java deleted file mode 100644 index b7647bb5d..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/monitor/VGLJobStatusMonitor.java +++ /dev/null @@ -1,91 +0,0 @@ -package org.auscope.portal.server.web.service.monitor; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.services.cloud.monitor.JobStatusMonitor; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; -import org.springframework.scheduling.quartz.QuartzJobBean; - -/** - * A task that monitors any pending or active VL jobs. It - * will trigger JobStatusChangeListener(s) to run when the - * job being processed changes its status. - * - * The timing for running this task is configured in - * applicationContext.xml file. - * - * It uses VEGLJobManager to retrieve pending or active job(s) - * from the database and VGLJobStatusAndLogReader to poll - * each job execution status from s3 cloud storage. - * - * @author Richard Goh - */ -public class VGLJobStatusMonitor extends QuartzJobBean { - private final Log LOG = LogFactory.getLog(getClass()); - - private VEGLJobManager jobManager; - private JobStatusMonitor jobStatusMonitor; - private PortalUserService jobUserService; - private NCIDetailsService nciDetailsService; - - - // Solely for testing - public void setNciDetailsService(NCIDetailsService nciDetailsService) { - this.nciDetailsService = nciDetailsService; - } - - public void setJobUserService(PortalUserService jobUserService) { - this.jobUserService = jobUserService; - } - - /** - * Sets the job manager to be used for querying - * pending or active jobs from VL DB. 
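
The removed VGLJobStatusMonitor was a Quartz-scheduled poller: its executeInternal() loaded pending or active jobs, attached the owner's credentials as job properties, and delegated to JobStatusMonitor.statusUpdate(). The following stripped-down sketch shows only that scheduling pattern; everything other than the QuartzJobBean and org.quartz types is a placeholder, and it assumes Quartz plus Spring's QuartzJobBean support are on the classpath.

import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.scheduling.quartz.QuartzJobBean;

public class StatusPollingJobSketch extends QuartzJobBean {

    @Override
    protected void executeInternal(JobExecutionContext ctx) throws JobExecutionException {
        try {
            // In the removed monitor this step was:
            //   jobs = jobManager.getPendingOrActiveJobs();
            //   ...attach per-user credentials as job properties...
            //   jobStatusMonitor.statusUpdate(jobs);
            pollAndUpdate();
        } catch (Exception ex) {
            // Re-throwing as JobExecutionException lets Quartz handle the failure.
            throw new JobExecutionException(ex);
        }
    }

    private void pollAndUpdate() {
        // Placeholder for the real polling logic.
    }
}
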
- * @param jobManager - */ - public void setJobManager(VEGLJobManager jobManager) { - this.jobManager = jobManager; - } - - /** - * Sets the JobStatusMonitor to be used by this class - * @param jobStatusMonitor - */ - public void setJobStatusMonitor(JobStatusMonitor jobStatusMonitor) { - this.jobStatusMonitor = jobStatusMonitor; - } - - @Override - protected void executeInternal(JobExecutionContext ctx) - throws JobExecutionException { - try { - List jobs = jobManager.getPendingOrActiveJobs(); - for (VEGLJob veglJob : jobs) { - PortalUser user = jobUserService.getByEmail(veglJob.getEmailAddress()); - veglJob.setProperty(CloudJob.PROPERTY_STS_ARN, user.getArnExecution()); - veglJob.setProperty(CloudJob.PROPERTY_CLIENT_SECRET, user.getAwsSecret()); - NCIDetails nciDetails = nciDetailsService.getByUser(user); - if (nciDetails != null) { - veglJob.setProperty(NCIDetails.PROPERTY_NCI_USER, nciDetails.getUsername()); - veglJob.setProperty(NCIDetails.PROPERTY_NCI_PROJECT, nciDetails.getProject()); - veglJob.setProperty(NCIDetails.PROPERTY_NCI_KEY, nciDetails.getKey()); - } - } - jobStatusMonitor.statusUpdate(jobs); - } catch (Exception ex) { - LOG.info(String.format("Error update jobs: %1$s", ex.getMessage())); - LOG.debug("Exception:", ex); - throw new JobExecutionException(ex); - } - } -} \ No newline at end of file diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/Dependency.java b/src/main/java/org/auscope/portal/server/web/service/scm/Dependency.java deleted file mode 100644 index 88f978b97..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/Dependency.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class Dependency { - - public static enum Type { - PUPPET, - REQUIREMENTS, - PYTHON, - TOOLBOX - } - - public Type type; - public String identifier; - public String version; - public String repository; -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/Entries.java b/src/main/java/org/auscope/portal/server/web/service/scm/Entries.java deleted file mode 100644 index 570c2143b..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/Entries.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import java.util.List; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class Entries { - private List problems; - private List solutions; - private List toolboxes; - - public List getProblems() { - return problems; - } - - public void setProblems(List problems) { - this.problems = problems; - } - - public List getSolutions() { - return solutions; - } - - public void setSolutions(List solutions) { - this.solutions = solutions; - } - - public List getToolboxes() { - return toolboxes; - } - - public void setToolboxes(List toolboxes) { - this.toolboxes = toolboxes; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/Entry.java b/src/main/java/org/auscope/portal/server/web/service/scm/Entry.java deleted file mode 100644 index 9ed58b1d5..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/Entry.java +++ /dev/null @@ -1,189 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; -import org.auscope.portal.core.services.PortalServiceException; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class Entry { - private String id; - private String name; - private String description; - private Date createdAt; - - // Flag to indicate we've been instantiated with a URI reference only, so we - // need to load our details from the server if properties are missing. - @JsonIgnore private boolean isReferenceOnly = false; - - final Log logger = LogFactory.getLog(getClass()); - - @JsonCreator - public Entry(String id) { - this(); - this.id = id; - isReferenceOnly = true; - } - - public Entry() {} - - @JsonProperty("@id") - public String getId() { return id; } - public void setId(String id) { this.id = id; } - - public String getUri() { return id; } - public void setUri(String uri) { this.id = uri; } - - @JsonProperty("created_at") - public Date getCreatedAt() { - if (isReferenceOnly) { - loadMissingProperties(); - } - return createdAt; - } - public void setCreatedAt(Date createdAt) { this.createdAt = createdAt; } - - // N.B. While only some Entries will have dependencies, we'll include them - // here to save having to create an abstract base class for "entries that - // can have dependencies". - private List dependencies; - - /** - * Return the list of dependencies for this Entry. - * - * @return List dependencies for this Entry. - */ - public List getDependencies() { - if (isReferenceOnly) { - loadMissingProperties(); - } - - return this.dependencies; - } - - // TODO: Map dependencies to 'dependencies' field in the json once it's - // migrated in the SSSC. - - /** - * Set the list of dependencies for this Entry. - * - * @param dependencies List new dependencies for this Entry. - */ - public void setDependencies(List dependencies) { - this.dependencies = dependencies; - - if (dependencies == null) { - this.dependencies = new ArrayList(); - } - } - - // Fallback to the old style "deps" property name for backwards compatibility - public List getDeps() { - return this.getDependencies(); - } - - public void setDeps(List dependencies) { - this.setDependencies(dependencies); - } - - /** - * @return the name - */ - public String getName() { - if (isReferenceOnly) { - loadMissingProperties(); - } - return name; - } - - /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the description - */ - public String getDescription() { - if (isReferenceOnly) { - loadMissingProperties(); - } - return description; - } - - /** - * @param description the description to set - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * Resolve our URI and use resulting description to fill in missing property values. - * - */ - public void loadMissingProperties() { - Entry description = ScmLoaderFactory.getInstance().loadEntry(id, getClass()); - - try { - copyMissingProperties(description); - isReferenceOnly = false; - } - catch (PortalServiceException ex) { - logger.error("Failed to load missing properties.", ex); - } - } - - /** - * Copy missing property values from entry. 
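
The removed scm.Entry class could be deserialised either from a bare URI string (a reference-only entry that lazily loads its details later) or from a full JSON object. The sketch below shows the Jackson mechanism behind that, a single-String @JsonCreator acting as a delegating creator; the EntryLike class and JSON literals are illustrative only, not the portal's real model, and jackson-databind is assumed to be available.

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;

public class EntryReferenceSketch {

    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class EntryLike {
        private String id;
        private String name;

        public EntryLike() {}

        // A single-String creator is treated by Jackson as a delegating creator,
        // so a bare JSON string becomes a reference-only instance.
        @JsonCreator
        public EntryLike(String id) { this.id = id; }

        public String getId() { return id; }
        public void setId(String id) { this.id = id; }
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // Reference-only form: just a URI string (details would be loaded on demand).
        EntryLike ref = mapper.readValue("\"https://example.org/scm/solution/1\"", EntryLike.class);

        // Full form: a JSON object populates the fields directly.
        EntryLike full = mapper.readValue(
                "{\"id\":\"https://example.org/scm/solution/1\",\"name\":\"Demo solution\"}",
                EntryLike.class);

        System.out.println(ref.getId() + " / " + full.getName());
    }
}
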
- * - * @param entry Entry to copy values from - */ - public void copyMissingProperties(Entry entry) throws PortalServiceException { - if (!entry.getClass().isInstance(this)) { - throw new PortalServiceException( - String.format("Incompatible type passed to %s.copyMissingProperties(%s)", - getClass().getName(), (entry == null ? null : entry.getClass().getName()))); - } - - if (name == null) { - setName(entry.getName()); - } - - if (description == null) { - setDescription(entry.getDescription()); - } - - if (createdAt == null) { - setCreatedAt(entry.getCreatedAt()); - } - - if (dependencies == null) { - setDependencies(entry.getDependencies()); - } - } - - @Override - public int hashCode() { - return id.hashCode(); - } - - @Override - public boolean equals(Object that) { - if (this == that) return true; - - if (that == null || this.getClass() != that.getClass()) return false; - - return this.id.equals(((Entry)that).getId()); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/Problem.java b/src/main/java/org/auscope/portal/server/web/service/scm/Problem.java deleted file mode 100644 index b44aa65af..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/Problem.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import java.util.ArrayList; -import java.util.List; - -import org.auscope.portal.core.services.PortalServiceException; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -/** - * Problem from the Scientific Solution Centre. - * - * Adds solutions to Entry members. - */ -@JsonIgnoreProperties(ignoreUnknown = true) -public class Problem extends Entry { - private List solutions; - - public Problem() { super(); } - public Problem(String id) { super(id); } - - public List getSolutions() { - return solutions; - } - - public void setSolutions(List solutions) { - this.solutions = solutions; - - if (this.solutions == null) { - this.solutions = new ArrayList(); - } - } - - @Override - public void copyMissingProperties(Entry entry) throws PortalServiceException { - super.copyMissingProperties(entry); - if (solutions == null) { - setSolutions(((Problem)entry).getSolutions()); - } - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/ScmLoader.java b/src/main/java/org/auscope/portal/server/web/service/scm/ScmLoader.java deleted file mode 100644 index 823ab40ad..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/ScmLoader.java +++ /dev/null @@ -1,8 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -public interface ScmLoader { - public T loadEntry(String id, Class cls); - public Problem loadProblem(String id); - public Toolbox loadToolbox(String id); - public Solution loadSolution(String id); -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/ScmLoaderFactory.java b/src/main/java/org/auscope/portal/server/web/service/scm/ScmLoaderFactory.java deleted file mode 100644 index c35c2e464..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/ScmLoaderFactory.java +++ /dev/null @@ -1,20 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -public class ScmLoaderFactory { - private static ScmLoader instance; - private static final Log logger = LogFactory.getLog(ScmLoaderFactory.class); - - public static ScmLoader getInstance() { - return instance; - } - - public static void registerLoader(ScmLoader instance) { - if 
(ScmLoaderFactory.instance != null) { - logger.warn("Registered multiple instances of ScmLoader with factory."); - } - ScmLoaderFactory.instance = instance; - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/Solution.java b/src/main/java/org/auscope/portal/server/web/service/scm/Solution.java deleted file mode 100644 index 4ba083906..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/Solution.java +++ /dev/null @@ -1,97 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import java.util.List; -import java.util.Map; - -import org.auscope.portal.core.services.PortalServiceException; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.fasterxml.jackson.databind.util.StdConverter; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class Solution extends Entry { - private String template; - private String templateHash; - - @JsonSerialize(converter=ProblemURIConverver.class) - private Problem problem; - private List> variables; - - private static class ProblemURIConverver extends StdConverter { - - @Override - public String convert(Problem problem) { - return problem.getId(); - } - - } - - public Solution() { super(); } - public Solution(String id) { super(id); } - - /** - * @return the template - */ - public String getTemplate() { - return template; - } - /** - * @param template the template to set - */ - public void setTemplate(String template) { - this.template = template; - } - - /** - * @return the template hash - */ - @JsonProperty("template_hash") - public String getTemplateHash() { - return templateHash; - } - - /** - * @param templateHash the template hash to set - */ - public void setTemplateHash(String templateHash) { - this.templateHash = templateHash; - } - - public List> getVariables() { - return variables; - } - - public void setVariables(List> variables) { - this.variables = variables; - } - - /** - * Return the problem that this solves. - * - * @return Problem - */ - public Problem getProblem() { - return problem; - } - - /** - * Sets the problem that this is a solution for. - * - * @param problem Problem instance that this is a Solution for. 
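
Related to the removed Solution class: on serialisation it collapsed its nested Problem back to a plain URI using a Jackson StdConverter registered through @JsonSerialize. A small sketch of that converter pattern follows; the class names and URI are made up for illustration, and jackson-databind is assumed to be on the classpath.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.util.StdConverter;

public class UriConverterSketch {

    public static class ProblemRef {
        private final String id;
        public ProblemRef(String id) { this.id = id; }
        public String getId() { return id; }
    }

    // Collapses the nested object to its URI on output, as the removed
    // Solution class did for its Problem member.
    public static class ProblemUriConverter extends StdConverter<ProblemRef, String> {
        @Override
        public String convert(ProblemRef value) { return value.getId(); }
    }

    public static class SolutionLike {
        @JsonSerialize(converter = ProblemUriConverter.class)
        public ProblemRef problem = new ProblemRef("https://example.org/scm/problem/1");
        public String name = "demo";
    }

    public static void main(String[] args) throws Exception {
        // Serialises the nested problem as its URI,
        // e.g. {"problem":"https://example.org/scm/problem/1","name":"demo"}
        System.out.println(new ObjectMapper().writeValueAsString(new SolutionLike()));
    }
}
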
- */ - public void setProblem(Problem problem) { - this.problem = problem; - } - - @Override - public void copyMissingProperties(Entry entry) throws PortalServiceException { - super.copyMissingProperties(entry); - Solution that = (Solution)entry; - if (template == null) { setTemplate(that.getTemplate()); } - if (templateHash == null) { setTemplateHash(that.getTemplateHash()); } - if (problem == null) { setProblem(that.getProblem()); } - if (variables == null) { setVariables(that.getVariables()); } - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/SsscImage.java b/src/main/java/org/auscope/portal/server/web/service/scm/SsscImage.java deleted file mode 100644 index a2090caa7..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/SsscImage.java +++ /dev/null @@ -1,38 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class SsscImage -{ - private String imageId; - private String provider; - private String command; - private Set annotations; - - @JsonProperty("image_id") - public String getImageId() { return imageId; } - public void setImageId(String imageId) { this.imageId = imageId; } - - public String getProvider() { return provider; } - public void setProvider(String provider) { this.provider = provider; } - - public String getCommand() { return command; } - public void setComment(String command) { this.command = command; } - - public Set getAnnotations() { return annotations; } - public void setAnnotations(Collection annotations) { - if (this.annotations == null) { - this.annotations = new HashSet(); - } else { - this.annotations.clear(); - } - - this.annotations.addAll(annotations); - } -} diff --git a/src/main/java/org/auscope/portal/server/web/service/scm/Toolbox.java b/src/main/java/org/auscope/portal/server/web/service/scm/Toolbox.java deleted file mode 100644 index 9d17ee82b..000000000 --- a/src/main/java/org/auscope/portal/server/web/service/scm/Toolbox.java +++ /dev/null @@ -1,84 +0,0 @@ -package org.auscope.portal.server.web.service.scm; - -import java.util.List; -import java.util.Map; - -import org.auscope.portal.core.services.PortalServiceException; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; - - -@JsonIgnoreProperties(ignoreUnknown = true) -public class Toolbox extends Entry { - private Map source; - private List images; - private String puppet; - private String puppetHash; - private String command; - - public Toolbox() { super(); } - public Toolbox(String id) { super(id); } - - public String getPuppet() { - return this.puppet; - } - - public void setPuppet(String puppet) { - this.puppet = puppet; - } - - @JsonProperty("puppet_hash") - public String getPuppetHash() { - return this.puppetHash; - } - - public void setPuppetHash(String puppetHash) { - this.puppetHash = puppetHash; - } - - /** - * @return the command - */ - public String getCommand() { - return command; - } - - /** - * @param command the command to set - */ - public void setCommand(String command) { - this.command = command; - } - - /** - * @return the source - */ - public Map getSource() { - return source; - } - /** - * @param source the source to set - */ - public void setSource(Map source) { - this.source = source; - } - - public 
List getImages() { - return images; - } - - public void setImages(List images) { - this.images = images; - } - - @Override - public void copyMissingProperties(Entry entry) throws PortalServiceException { - super.copyMissingProperties(entry); - Toolbox that = (Toolbox)entry; - if (puppet == null) { setPuppet(that.getPuppet()); } - if (puppetHash == null) { setPuppetHash(that.getPuppetHash()); } - if (source == null) { setSource(that.getSource()); } - if (images == null) { setImages(that.getImages()); } - } -} diff --git a/src/main/resources/application.yaml.default b/src/main/resources/application.yaml.default index 6d5ed1a02..73f5ad9f9 100755 --- a/src/main/resources/application.yaml.default +++ b/src/main/resources/application.yaml.default @@ -120,7 +120,6 @@ spring: callbackUrl: /login/aaf/callback # Register at AAF with /login/aaf/callback jwtsecret: AAF_SECRET # LC_CTYPE=C tr -dc '[[:alnum:][:punct:]]' < /dev/urandom - data: elasticsearch: repositories: @@ -136,42 +135,6 @@ spring: socket-timeout: 60000 # 60 seconds #connection-timeout: 4000 # 4 seconds - -# Cloud specific settings -cloud: - localStageInDir: C:\\temp\\vegl-portal-temp - sssc: - solutions: - url: https://sssc-vgl.geoanalytics.csiro.au/ - defaultToolbox: http://ec2-54-206-9-187.ap-southeast-2.compute.amazonaws.com/scm/toolbox/3 - proms: - report: - url: http://proms-dev1-vc.it.csiro.au/id/report/ - reportingsystem: - uri: http://proms-dev1-vc.it.csiro.au/rs - - # Gives us separation from real DB passwords. - encryption: - password: ABCdefGHI - - # AWS keys to be used for dev only. Production system to use AWS roles for authorization - aws: - account: AWS_ACCOUNT - accesskey: AWS_ACCESS_KEY - secretkey: AWS_SECRET_KEY - sessionkey: AWS_SESSION_KEY - stsrequirement: Mandatory - - # Cloud VM files - vm: - sh: https://raw.githubusercontent.com/AuScope/ANVGL-Portal/master/vm/vl.sh - vm-shutdown: - sh: https://raw.githubusercontent.com/AuScope/ANVGL-Portal/master/vm/vl-shutdown.sh - - erddapservice: - url: http://siss2.anu.edu.au/erddap/griddap/ - stripeApiKey: STRIPE_API_KEY - env: stackdriver: enable: false diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-do-not-solve-geometry.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-do-not-solve-geometry.py deleted file mode 100644 index 83193f998..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-do-not-solve-geometry.py +++ /dev/null @@ -1,244 +0,0 @@ -#! 
/usr/bin/python2.7 -import csv, sys, os, subprocess, glob, time, datetime -import xml.etree.ElementTree as ET - -controlFileString = """ -Control Begin - - NumberOfSystems = 1 - - EMSystem1 Begin - SystemFile = tempestStandard.stm - UseXComponent = yes - UseYComponent = no - UseZComponent = yes - - InvertTotalField = no - ReconstructPrimaryFieldFromInputGeometry = no - - EstimateNoiseFromModel = yes - - XMultiplicativeNoise = 2.26 - XAdditiveNoise = 0.0119 0.0117 0.0093 0.0061 0.0057 0.0054 0.0051 0.0048 0.0046 0.0044 0.0043 0.0040 0.0034 0.0026 0.0034 - XComponentSecondary = Column 30 - - ZMultiplicativeNoise = 3.74 - ZAdditiveNoise = 0.0094 0.0084 0.0067 0.0047 0.0045 0.0043 0.0041 0.0039 0.0036 0.0034 0.0033 0.0030 0.0024 0.0017 0.0019 - ZComponentSecondary = -Column 68 - EMSystem1 End - - Earth Begin - NumberOfLayers = 30 - Earth End - - Options Begin - SolveConductivity = yes - SolveThickness = no - - SolveTX_Height = no - SolveTX_Roll = no - SolveTX_Pitch = no - SolveTX_Yaw = no - SolveTXRX_DX = no - SolveTXRX_DY = no - SolveTXRX_DZ = no - SolveRX_Roll = no - SolveRX_Pitch = no - SolveRX_Yaw = no - - AlphaConductivity = 1.0 - AlphaThickness = 0.0 - AlphaGeometry = 1.0 - AlphaSmoothness = 100000 //Set to 0 for no vertical conductivity smoothing - SmoothnessMethod = Minimise2ndDerivatives - - MinimumPhiD = 1.0 - MinimumPercentageImprovement = 1.0 - MaximumIterations = 100 - Options End - - Input Begin - DataFile = aemInput.dat - HeaderLines = 0 - Subsample = 1 - - Columns Begin - SurveyNumber = Column 5 - DateNumber = Column 7 - FlightNumber = Column 2 - LineNumber = Column 1 - FidNumber = Column 3 - Easting = Column 12 - Northing = Column 13 - GroundElevation = Column 17 - Altimeter = Column 14 - - TX_Height = Column 21 - TX_Roll = Column 20 - TX_Pitch = -Column 19 - TX_Yaw = 0 - TXRX_DX = Column 22 - TXRX_DY = 0 - TXRX_DZ = Column 23 - RX_Roll = 0 - RX_Pitch = 0 - RX_Yaw = 0 - - ReferenceModel Begin - Conductivity = 0.001 - Thickness = 4.00 4.40 4.84 5.32 5.86 6.44 7.09 7.79 8.57 9.43 10.37 11.41 12.55 13.81 15.19 16.71 18.38 20.22 22.24 24.46 26.91 29.60 32.56 35.82 39.40 43.34 47.67 52.44 57.68 - ReferenceModel End - - StdDevReferenceModel Begin - Conductivity = 3.0 - StdDevReferenceModel End - Columns End - Input End - - Output Begin - DataFile = inversion.output.asc - LogFile = inversion.output.log - - PositiveLayerBottomDepths = no - NegativeLayerBottomDepths = yes - InterfaceElevations = no - ParameterSensitivity = no - ParameterUncertainty = no - PredictedData = no - Output End - -Control End -""" - -tempestStandardFileString = """ -System Begin - Name = Tempest - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 0.5 - LoopArea = 1 - BaseFrequency = 25 - WaveFormCurrent Begin - -0.0200000000000 0.0 - -0.0199933333333 1.0 - -0.0000066666667 1.0 - 0.0000000000000 0.0 - 0.0000066666667 -1.0 - 0.0199933333333 -1.0 - 0.0200000000000 0.0 - WaveFormCurrent End - WaveformDigitisingFrequency = 75000 - Transmitter End - - Receiver Begin - - NumberOfWindows = 15 - WindowWeightingScheme = Boxcar - - WindowTimes Begin - 0.0000066667 0.0000200000 - 0.0000333333 0.0000466667 - 0.0000600000 0.0000733333 - 0.0000866667 0.0001266667 - 0.0001400000 0.0002066667 - 0.0002200000 0.0003400000 - 0.0003533333 0.0005533333 - 0.0005666667 0.0008733333 - 0.0008866667 0.0013533333 - 0.0013666667 0.0021000000 - 0.0021133333 0.0032733333 - 0.0032866667 0.0051133333 - 0.0051266667 0.0079933333 - 0.0080066667 0.0123933333 - 0.0124066667 0.0199933333 - WindowTimes End - - Receiver 
End - - ForwardModelling Begin - - OutputType = B - - XOutputScaling = 1e15 - YOutputScaling = 1e15 - ZOutputScaling = 1e15 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 6 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) - - -# Write our control files -with open("galeisbs.con", "w") as f: - f.write(controlFileString) -with open("tempestStandard.stm", "w") as f: - f.write(tempestStandardFileString) -cloudUpload("galeisbs.con", "galeisbs.con") -cloudUpload("tempestStandard.stm", "tempestStandard.stm") - -# Read the WFS Input data into a CSV format -tree = ET.parse("${wfs-input-xml}"); -root = tree.getroot(); -csvArray=[]; -for featureMembers in root: - for aemsurveys in featureMembers: - row = [] - for field in aemsurveys: - #Non simple properties are ignored - if len(field) > 0: - continue - row.append(field.text) - csvArray.append(row) -with open("aemInput.dat",'w') as f: - writer = csv.writer(f, delimiter=' ', lineterminator='\n') - for row in csvArray: - writer.writerow(row) -cloudUpload("aemInput.dat", "aemInput.dat") - -# Execute AEM Process via MPI -subprocess.call(["mpirun", "-n", "${n-threads}", "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"]) - -# Upload results -inversionFiles = glob.glob('inversion.output.*') -print 'About to upload the following files:' -print inversionFiles -for fn in inversionFiles: - cloudUpload(fn, fn) - -# Concatenate output files for easier parsing -ascFiles = sorted(glob.glob('inversion.output.*.asc')) -with open('inversion.output.asc.combined', 'w') as outfile: - for fname in ascFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.asc.combined', 'inversion.output.asc.combined') -logFiles = sorted(glob.glob('inversion.output.*.log')) -with open('inversion.output.log.combined', 'w') as outfile: - for fname in logFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.log.combined', 'inversion.output.log.combined') diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-solve-rxpitch-and-offsets.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-solve-rxpitch-and-offsets.py deleted file mode 100644 index 58d765ca6..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-solve-rxpitch-and-offsets.py +++ /dev/null @@ -1,265 +0,0 @@ -#! 
/usr/bin/python2.7 -import csv, sys, os, subprocess, glob, time, datetime -import xml.etree.ElementTree as ET - -controlFileString = """ -Control Begin - - NumberOfSystems = 1 - - EMSystem1 Begin - SystemFile = tempestStandard.stm - UseXComponent = yes - UseYComponent = no - UseZComponent = yes - - InvertTotalField = yes - ReconstructPrimaryFieldFromInputGeometry = yes - - EstimateNoiseFromModel = yes - - XMultiplicativeNoise = 2.26 - XAdditiveNoise = 0.0119 0.0117 0.0093 0.0061 0.0057 0.0054 0.0051 0.0048 0.0046 0.0044 0.0043 0.0040 0.0034 0.0026 0.0034 - XComponentSecondary = Column 30 - - ZMultiplicativeNoise = 3.74 - ZAdditiveNoise = 0.0094 0.0084 0.0067 0.0047 0.0045 0.0043 0.0041 0.0039 0.0036 0.0034 0.0033 0.0030 0.0024 0.0017 0.0019 - ZComponentSecondary = -Column 68 - EMSystem1 End - - Earth Begin - NumberOfLayers = 30 - Earth End - - Options Begin - SolveConductivity = yes - SolveThickness = no - - SolveTX_Height = no - SolveTX_Roll = no - SolveTX_Pitch = no - SolveTX_Yaw = no - SolveTXRX_DX = yes - SolveTXRX_DY = no - SolveTXRX_DZ = yes - SolveRX_Roll = no - SolveRX_Pitch = yes - SolveRX_Yaw = no - - AlphaConductivity = 1.0 - AlphaThickness = 0.0 - AlphaGeometry = 1.0 - AlphaSmoothness = 100000 //Set to 0 for no vertical conductivity smoothing - SmoothnessMethod = Minimise2ndDerivatives - - MinimumPhiD = 1.0 - MinimumPercentageImprovement = 1.0 - MaximumIterations = 100 - Options End - - Input Begin - DataFile = aemInput.dat - HeaderLines = 0 - Subsample = 1 - - Columns Begin - SurveyNumber = Column 5 - DateNumber = Column 7 - FlightNumber = Column 2 - LineNumber = Column 1 - FidNumber = Column 3 - Easting = Column 12 - Northing = Column 13 - GroundElevation = Column 17 - Altimeter = Column 14 - - TX_Height = Column 21 - TX_Roll = Column 20 - TX_Pitch = -Column 19 - TX_Yaw = 0 - TXRX_DX = Column 24 - TXRX_DY = -Column 25 - TXRX_DZ = Column 26 - RX_Roll = 0 - RX_Pitch = 0 - RX_Yaw = 0 - - TotalFieldReconstruction Begin - //The system geometry that corresponds to the primary field removed during Fugro/CGG's processing - //Skyvan platform use TX_Pitch = -0.45 (=+0.45 in TEMPEST sign convention) - //Casa platform use TX_Pitch = -0.90 (=+0.90 in TEMPEST sign convention) - TX_Roll = 0 - TX_Pitch = -0.45 - TX_Yaw = 0 - TXRX_DX = Column 22 - TXRX_DY = 0 - TXRX_DZ = Column 23 - RX_Roll = 0 - RX_Pitch = 0 - RX_Yaw = 0 - TotalFieldReconstruction End - - ReferenceModel Begin - TXRX_DX = Column 22 - TXRX_DZ = Column 23 - RX_Pitch = 0 - Conductivity = 0.001 - Thickness = 4.00 4.40 4.84 5.32 5.86 6.44 7.09 7.79 8.57 9.43 10.37 11.41 12.55 13.81 15.19 16.71 18.38 20.22 22.24 24.46 26.91 29.60 32.56 35.82 39.40 43.34 47.67 52.44 57.68 - ReferenceModel End - - StdDevReferenceModel Begin - TXRX_DX = 1.0 - TXRX_DZ = 1.0 - RX_Pitch = 1.0 - Conductivity = 3.0 - StdDevReferenceModel End - Columns End - Input End - - Output Begin - DataFile = inversion.output.asc - LogFile = inversion.output.log - - PositiveLayerBottomDepths = no - NegativeLayerBottomDepths = yes - InterfaceElevations = no - ParameterSensitivity = no - ParameterUncertainty = no - PredictedData = no - Output End - -Control End -""" - -tempestStandardFileString = """ -System Begin - Name = Tempest - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 0.5 - LoopArea = 1 - BaseFrequency = 25 - WaveFormCurrent Begin - -0.0200000000000 0.0 - -0.0199933333333 1.0 - -0.0000066666667 1.0 - 0.0000000000000 0.0 - 0.0000066666667 -1.0 - 0.0199933333333 -1.0 - 0.0200000000000 0.0 - WaveFormCurrent End - 
WaveformDigitisingFrequency = 75000 - Transmitter End - - Receiver Begin - - NumberOfWindows = 15 - WindowWeightingScheme = Boxcar - - WindowTimes Begin - 0.0000066667 0.0000200000 - 0.0000333333 0.0000466667 - 0.0000600000 0.0000733333 - 0.0000866667 0.0001266667 - 0.0001400000 0.0002066667 - 0.0002200000 0.0003400000 - 0.0003533333 0.0005533333 - 0.0005666667 0.0008733333 - 0.0008866667 0.0013533333 - 0.0013666667 0.0021000000 - 0.0021133333 0.0032733333 - 0.0032866667 0.0051133333 - 0.0051266667 0.0079933333 - 0.0080066667 0.0123933333 - 0.0124066667 0.0199933333 - WindowTimes End - - Receiver End - - ForwardModelling Begin - - OutputType = B - - XOutputScaling = 1e15 - YOutputScaling = 1e15 - ZOutputScaling = 1e15 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 6 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) - - -# Write our control files -with open("galeisbs.con", "w") as f: - f.write(controlFileString) -with open("tempestStandard.stm", "w") as f: - f.write(tempestStandardFileString) -cloudUpload("galeisbs.con", "galeisbs.con") -cloudUpload("tempestStandard.stm", "tempestStandard.stm") - -# Read the WFS Input data into a CSV format -tree = ET.parse("${wfs-input-xml}"); -root = tree.getroot(); -csvArray=[]; -for featureMembers in root: - for aemsurveys in featureMembers: - row = [] - for field in aemsurveys: - #Non simple properties are ignored - if len(field) > 0: - continue - row.append(field.text) - csvArray.append(row) -with open("aemInput.dat",'w') as f: - writer = csv.writer(f, delimiter=' ', lineterminator='\n') - for row in csvArray: - writer.writerow(row) -cloudUpload("aemInput.dat", "aemInput.dat") - -# Execute AEM Process via MPI -subprocess.call(["mpirun", "-n", "${n-threads}", "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"]) - -# Upload results -inversionFiles = glob.glob('inversion.output.*') -print 'About to upload the following files:' -print inversionFiles -for fn in inversionFiles: - cloudUpload(fn, fn) - -# Concatenate output files for easier parsing -ascFiles = sorted(glob.glob('inversion.output.*.asc')) -with open('inversion.output.asc.combined', 'w') as outfile: - for fname in ascFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.asc.combined', 'inversion.output.asc.combined') -logFiles = sorted(glob.glob('inversion.output.*.log')) -with open('inversion.output.log.combined', 'w') as outfile: - for fname in logFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.log.combined', 'inversion.output.log.combined') diff --git 
a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-solve-rxpitch.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-solve-rxpitch.py deleted file mode 100644 index 11b1bf459..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-Frome-tempest-galeisbstdem-solve-rxpitch.py +++ /dev/null @@ -1,246 +0,0 @@ -#! /usr/bin/python2.7 -import csv, sys, os, subprocess, glob, time, datetime -import xml.etree.ElementTree as ET - -controlFileString = """ -Control Begin - - NumberOfSystems = 1 - - EMSystem1 Begin - SystemFile = tempestStandard.stm - UseXComponent = yes - UseYComponent = no - UseZComponent = yes - - InvertTotalField = no - ReconstructPrimaryFieldFromInputGeometry = no - - EstimateNoiseFromModel = yes - - XMultiplicativeNoise = 2.26 - XAdditiveNoise = 0.0119 0.0117 0.0093 0.0061 0.0057 0.0054 0.0051 0.0048 0.0046 0.0044 0.0043 0.0040 0.0034 0.0026 0.0034 - XComponentSecondary = Column 30 - - ZMultiplicativeNoise = 3.74 - ZAdditiveNoise = 0.0094 0.0084 0.0067 0.0047 0.0045 0.0043 0.0041 0.0039 0.0036 0.0034 0.0033 0.0030 0.0024 0.0017 0.0019 - ZComponentSecondary = -Column 68 - EMSystem1 End - - Earth Begin - NumberOfLayers = 30 - Earth End - - Options Begin - SolveConductivity = yes - SolveThickness = no - - SolveTX_Height = no - SolveTX_Roll = no - SolveTX_Pitch = no - SolveTX_Yaw = no - SolveTXRX_DX = no - SolveTXRX_DY = no - SolveTXRX_DZ = no - SolveRX_Roll = no - SolveRX_Pitch = yes - SolveRX_Yaw = no - - AlphaConductivity = 1.0 - AlphaThickness = 0.0 - AlphaGeometry = 1.0 - AlphaSmoothness = 100000 //Set to 0 for no vertical conductivity smoothing - SmoothnessMethod = Minimise2ndDerivatives - - MinimumPhiD = 1.0 - MinimumPercentageImprovement = 1.0 - MaximumIterations = 100 - Options End - - Input Begin - DataFile = aemInput.dat - HeaderLines = 0 - Subsample = 1 - - Columns Begin - SurveyNumber = Column 5 - DateNumber = Column 7 - FlightNumber = Column 2 - LineNumber = Column 1 - FidNumber = Column 3 - Easting = Column 12 - Northing = Column 13 - GroundElevation = Column 17 - Altimeter = Column 14 - - TX_Height = Column 21 - TX_Roll = Column 20 - TX_Pitch = -Column 19 - TX_Yaw = 0 - TXRX_DX = Column 22 - TXRX_DY = 0 - TXRX_DZ = Column 23 - RX_Roll = 0 - RX_Pitch = 0 - RX_Yaw = 0 - - ReferenceModel Begin - RX_Pitch = 0.0 - Conductivity = 0.001 - Thickness = 4.00 4.40 4.84 5.32 5.86 6.44 7.09 7.79 8.57 9.43 10.37 11.41 12.55 13.81 15.19 16.71 18.38 20.22 22.24 24.46 26.91 29.60 32.56 35.82 39.40 43.34 47.67 52.44 57.68 - ReferenceModel End - - StdDevReferenceModel Begin - RX_Pitch = 1.0 - Conductivity = 3.0 - StdDevReferenceModel End - Columns End - Input End - - Output Begin - DataFile = inversion.output.asc - LogFile = inversion.output.log - - PositiveLayerBottomDepths = no - NegativeLayerBottomDepths = yes - InterfaceElevations = no - ParameterSensitivity = no - ParameterUncertainty = no - PredictedData = no - Output End - -Control End -""" - -tempestStandardFileString = """ -System Begin - Name = Tempest - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 0.5 - LoopArea = 1 - BaseFrequency = 25 - WaveFormCurrent Begin - -0.0200000000000 0.0 - -0.0199933333333 1.0 - -0.0000066666667 1.0 - 0.0000000000000 0.0 - 0.0000066666667 -1.0 - 0.0199933333333 -1.0 - 0.0200000000000 0.0 - WaveFormCurrent End - WaveformDigitisingFrequency = 75000 - Transmitter End - - Receiver Begin - - NumberOfWindows = 15 - WindowWeightingScheme 
= Boxcar - - WindowTimes Begin - 0.0000066667 0.0000200000 - 0.0000333333 0.0000466667 - 0.0000600000 0.0000733333 - 0.0000866667 0.0001266667 - 0.0001400000 0.0002066667 - 0.0002200000 0.0003400000 - 0.0003533333 0.0005533333 - 0.0005666667 0.0008733333 - 0.0008866667 0.0013533333 - 0.0013666667 0.0021000000 - 0.0021133333 0.0032733333 - 0.0032866667 0.0051133333 - 0.0051266667 0.0079933333 - 0.0080066667 0.0123933333 - 0.0124066667 0.0199933333 - WindowTimes End - - Receiver End - - ForwardModelling Begin - - OutputType = B - - XOutputScaling = 1e15 - YOutputScaling = 1e15 - ZOutputScaling = 1e15 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 6 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) - - -# Write our control files -with open("galeisbs.con", "w") as f: - f.write(controlFileString) -with open("tempestStandard.stm", "w") as f: - f.write(tempestStandardFileString) -cloudUpload("galeisbs.con", "galeisbs.con") -cloudUpload("tempestStandard.stm", "tempestStandard.stm") - -# Read the WFS Input data into a CSV format -tree = ET.parse("${wfs-input-xml}"); -root = tree.getroot(); -csvArray=[]; -for featureMembers in root: - for aemsurveys in featureMembers: - row = [] - for field in aemsurveys: - #Non simple properties are ignored - if len(field) > 0: - continue - row.append(field.text) - csvArray.append(row) -with open("aemInput.dat",'w') as f: - writer = csv.writer(f, delimiter=' ', lineterminator='\n') - for row in csvArray: - writer.writerow(row) -cloudUpload("aemInput.dat", "aemInput.dat") - -# Execute AEM Process via MPI -subprocess.call(["mpirun", "-n", "${n-threads}", "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"]) - -# Upload results -inversionFiles = glob.glob('inversion.output.*') -print 'About to upload the following files:' -print inversionFiles -for fn in inversionFiles: - cloudUpload(fn, fn) - -# Concatenate output files for easier parsing -ascFiles = sorted(glob.glob('inversion.output.*.asc')) -with open('inversion.output.asc.combined', 'w') as outfile: - for fname in ascFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.asc.combined', 'inversion.output.asc.combined') -logFiles = sorted(glob.glob('inversion.output.*.log')) -with open('inversion.output.log.combined', 'w') as outfile: - for fname in logFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.log.combined', 'inversion.output.log.combined') diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-bhmar-skytem-galeisbstdem.py 
b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-bhmar-skytem-galeisbstdem.py deleted file mode 100644 index 8578baac7..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-bhmar-skytem-galeisbstdem.py +++ /dev/null @@ -1,357 +0,0 @@ -#! /usr/bin/python2.7 -import csv, sys, os, subprocess, glob, time, datetime -import xml.etree.ElementTree as ET - -controlFileString = """ -Control Begin - - NumberOfSystems = 2 - - EMSystem1 Begin - SystemFile = SkytemLM.stm - UseXComponent = no - UseYComponent = no - UseZComponent = yes - InvertTotalField = no - ReconstructPrimaryFieldFromInputGeometry = no - EstimateNoiseFromModel = no - ZComponentSecondary = -Column 33 - ZComponentNoise = Column 72 - EMSystem1 End - - EMSystem2 Begin - SystemFile = SkytemHM.stm - UseXComponent = no - UseYComponent = no - UseZComponent = yes - InvertTotalField = no - ReconstructPrimaryFieldFromInputGeometry = no - EstimateNoiseFromModel = no - ZComponentSecondary = -Column 51 - ZComponentNoise = Column 90 - EMSystem2 End - - Earth Begin - NumberOfLayers = 30 - Earth End - - Options Begin - SolveConductivity = yes - SolveThickness = no - - SolveTX_Height = no - SolveTX_Roll = no - SolveTX_Pitch = no - SolveTX_Yaw = no - SolveTXRX_DX = no - SolveTXRX_DY = no - SolveTXRX_DZ = no - SolveRX_Roll = no - SolveRX_Pitch = no - SolveRX_Yaw = no - - AlphaConductivity = 1.0 - AlphaThickness = 0.0 - AlphaGeometry = 1.0 - AlphaSmoothness = 333 //Set to 0 for no vertical conductivity smoothing - SmoothnessMethod = Minimise2ndDerivatives - - MinimumPhiD = 1.0 - MinimumPercentageImprovement = 1.0 - MaximumIterations = 100 - Options End - - Input Begin - DataFile = aemInput.dat - HeaderLines = 0 - Subsample = 1 - - Columns Begin - SurveyNumber = Column 1 - DateNumber = Column 4 - FlightNumber = Column 5 - LineNumber = Column 6 - FidNumber = Column 7 - Easting = Column 12 - Northing = Column 13 - GroundElevation = Column 21 - Altimeter = Column 19 - - TX_Height = Column 19 - TX_Roll = Column 24 - TX_Pitch = Column 25 - TX_Yaw = Column 26 - TXRX_DX = Column 27 - TXRX_DY = Column 28 - TXRX_DZ = Column 29 - RX_Roll = Column 24 - RX_Pitch = Column 25 - RX_Yaw = Column 26 - - ReferenceModel Begin - Conductivity = 0.001 - Thickness = 1.50 1.65 1.81 2.00 2.20 2.42 2.66 2.92 3.21 3.54 3.89 4.28 4.71 5.18 5.70 6.27 6.89 7.58 8.34 9.17 10.09 11.10 12.21 13.43 14.77 16.25 17.88 19.66 21.63 - ReferenceModel End - - StdDevReferenceModel Begin - Conductivity = 3.0 - StdDevReferenceModel End - Columns End - Input End - - Output Begin - DataFile = inversion.output.asc - LogFile = inversion.output.log - - PositiveLayerBottomDepths = no - NegativeLayerBottomDepths = yes - InterfaceElevations = no - ParameterSensitivity = yes - ParameterUncertainty = yes - PredictedData = no - Output End - -Control End -""" - -stmSkytemLMFileString = """ -System Begin - Name = SkyTem-Low-Moment - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 1 - LoopArea = 1 - BaseFrequency = 222.22222222222222222 - WaveformDigitisingFrequency = 3640888.888888889 - WaveFormCurrent Begin - -1.000E-03 0.000E+00 - -9.146E-04 6.264E-01 - -7.879E-04 9.132E-01 - -5.964E-04 9.905E-01 - 0.000E+00 1.000E+00 - 4.629E-07 9.891E-01 - 8.751E-07 9.426E-01 - 1.354E-06 8.545E-01 - 2.540E-06 6.053E-01 - 3.972E-06 3.030E-01 - 5.404E-06 4.077E-02 - 5.721E-06 1.632E-02 - 6.113E-06 4.419E-03 - 6.663E-06 6.323E-04 - 8.068E-06 0.000E+00 - 1.250E-03 0.000E+00 - WaveFormCurrent End - - Transmitter End - - Receiver Begin - 
NumberOfWindows = 18 - WindowWeightingScheme = AreaUnderCurve - - //Gate04 (0.00001139 0.00001500) was removed as too close to 11.5us front gate - WindowTimes Begin - 0.00001539 0.00001900 - 0.00001939 0.00002400 - 0.00002439 0.00003100 - 0.00003139 0.00003900 - 0.00003939 0.00004900 - 0.00004939 0.00006200 - 0.00006239 0.00007800 - 0.00007839 0.00009900 - 0.00009939 0.00012500 - 0.00012539 0.00015700 - 0.00015739 0.00019900 - 0.00019939 0.00025000 - 0.00025039 0.00031500 - 0.00031539 0.00039700 - 0.00039739 0.00050000 - 0.00050039 0.00063000 - 0.00063039 0.00079300 - 0.00079339 0.00099900 - WindowTimes End - - LowPassFilter Begin - CutOffFrequency = 300000 450000 - Order = 1 1 - LowPassFilter End - - Receiver End - - ForwardModelling Begin - - OutputType = dB/dt - - SaveDiagnosticFiles = no - - XOutputScaling = 1 - YOutputScaling = 1 - ZOutputScaling = 1 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 5 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -stmSkytemHMFileString = """ -System Begin - Name = SkyTem-HighMoment - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 1 - LoopArea = 1 - BaseFrequency = 25 - WaveformDigitisingFrequency = 819200 - WaveFormCurrent Begin - -1.000E-02 0.000E+00 - -8.386E-03 4.568E-01 - -6.380E-03 7.526E-01 - -3.783E-03 9.204E-01 - 0.000E+00 1.000E+00 - 3.960E-07 9.984E-01 - 7.782E-07 9.914E-01 - 1.212E-06 9.799E-01 - 3.440E-06 9.175E-01 - 1.981E-05 4.587E-01 - 3.619E-05 7.675E-03 - 3.664E-05 3.072E-03 - 3.719E-05 8.319E-04 - 3.798E-05 1.190E-04 - 3.997E-05 0.000E+00 - 1.000E-02 0.000E+00 - WaveFormCurrent End - Transmitter End - - Receiver Begin - NumberOfWindows = 21 - WindowWeightingScheme = AreaUnderCurve - - //Gate11 (5.93900E-05 7.50000E-05) was removed as too close to 59us front gate - WindowTimes Begin - 7.53900E-05 9.60000E-05 - 9.63900E-05 1.22000E-04 - 1.22390E-04 1.54000E-04 - 1.54390E-04 1.96000E-04 - 1.96390E-04 2.47000E-04 - 2.47390E-04 3.12000E-04 - 3.12390E-04 3.94000E-04 - 3.94390E-04 4.97000E-04 - 4.97390E-04 6.27000E-04 - 6.27390E-04 7.90000E-04 - 7.90390E-04 9.96000E-04 - 9.96390E-04 1.25500E-03 - 1.25539E-03 1.58100E-03 - 1.58139E-03 1.99100E-03 - 1.99139E-03 2.50800E-03 - 2.50839E-03 3.15800E-03 - 3.15839E-03 3.97700E-03 - 3.97739E-03 5.00800E-03 - 5.00839E-03 6.30600E-03 - 6.30639E-03 7.93900E-03 - 7.93939E-03 9.73900E-03 - WindowTimes End - - LowPassFilter Begin - CutOffFrequency = 300000 450000 - Order = 1 1 - LowPassFilter End - - Receiver End - - ForwardModelling Begin - - OutputType = dB/dt - - SaveDiagnosticFiles = no - - XOutputScaling = 1 - YOutputScaling = 1 - ZOutputScaling = 1 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 5 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = 
subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) - - -# Write our control files -with open("galeisbs.con", "w") as f: - f.write(controlFileString) -with open("SkytemLM.stm", "w") as f: - f.write(stmSkytemLMFileString) -with open("SkytemHM.stm", "w") as f: - f.write(stmSkytemHMFileString) -cloudUpload("galeisbs.con", "galeisbs.con") -cloudUpload("SkytemLM.stm", "SkytemLM.stm") -cloudUpload("SkytemHM.stm", "SkytemHM.stm") - -# Read the WFS Input data into a CSV format -tree = ET.parse("${wfs-input-xml}"); -root = tree.getroot(); -csvArray=[]; -for featureMembers in root: - for aemsurveys in featureMembers: - row = [] - for field in aemsurveys: - #Non simple properties are ignored - if len(field) > 0: - continue - row.append(field.text) - csvArray.append(row) -with open("aemInput.dat",'w') as f: - writer = csv.writer(f, delimiter=' ', lineterminator='\n') - for row in csvArray: - writer.writerow(row) -cloudUpload("aemInput.dat", "aemInput.dat") - -# Execute AEM Process via MPI -subprocess.call(["mpirun", "-n", "${n-threads}", "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"]) - -# Upload results -inversionFiles = glob.glob('inversion.output.*') -print 'About to upload the following files:' -print inversionFiles -for fn in inversionFiles: - cloudUpload(fn, fn) - -# Concatenate output files for easier parsing -ascFiles = sorted(glob.glob('inversion.output.*.asc')) -with open('inversion.output.asc.combined', 'w') as outfile: - for fname in ascFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.asc.combined', 'inversion.output.asc.combined') -logFiles = sorted(glob.glob('inversion.output.*.log')) -with open('inversion.output.log.combined', 'w') as outfile: - for fname in logFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.log.combined', 'inversion.output.log.combined') diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-inversion.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-inversion.py deleted file mode 100644 index 5f09485fc..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-inversion.py +++ /dev/null @@ -1,255 +0,0 @@ -#! 
/usr/bin/python2.7 -import csv, sys, os, subprocess, glob, time, datetime -import xml.etree.ElementTree as ET - -controlFileString = """ -Control Begin - - LogFile = inversion.output.log - NumberOfSystems = 1 - - EMSystem1 Begin - SystemFile = tempest.stm - UseXComponent = yes - UseYComponent = no - UseZComponent = yes - - InvertTotalField = yes - ReconstructPrimaryFieldFromInputGeometry = yes - - EstimateNoiseFromModel = yes - XMultiplicativeNoise = ${xmulti-noise} - XAdditiveNoise = ${xadd-noise} - ZMultiplicativeNoise = ${zmulti-noise} - ZAdditiveNoise = ${zadd-noise} - - XComponentPrimary = ${column-xcomponentprimary} - YComponentPrimary = ${column-ycomponentprimary} - ZComponentPrimary = ${column-zcomponentprimary} - - XComponentSecondary = ${column-xcomponentsecondary} - YComponentSecondary = ${column-ycomponentsecondary} - ZComponentSecondary = ${column-zcomponentsecondary} - - StdDevXComponentWindows = ${column-stddevxwindows} - StdDevYComponentWindows = ${column-stddevywindows} - StdDevZComponentWindows = ${column-stddevzwindows} - EMSystem1 End - - Earth Begin - NumberOfLayers = ${earth-layers} - Earth End - - Options Begin - SolveConductivity = ${solve-conductivity} - SolveThickness = ${solve-thickness} - - SolveTX_Height = ${solve-txheight} - SolveTX_Roll = ${solve-txroll} - SolveTX_Pitch = ${solve-txpitch} - SolveTX_Yaw = ${solve-txyaw} - SolveTXRX_DX = ${solve-txrxdx} - SolveTXRX_DY = ${solve-txrxdy} - SolveTXRX_DZ = ${solve-txrxdz} - SolveRX_Roll = ${solve-rxroll} - SolveRX_Pitch = ${solve-rxpitch} - SolveRX_Yaw = ${solve-rxyaw} - - AlphaConductivity = ${alpha-conductivity} - AlphaThickness = ${alpha-thickness} - AlphaGeometry = ${alpha-geometry} - AlphaSmoothness = ${alpha-smoothness} - - MinimumPhiD = ${min-phi-d} - MinimumPercentageImprovement = ${min-percentage-imp} - MaximumIterations = ${max-iterations} - Options End - - InputOutput Begin - InputFile = aemInput.dat - HeaderLines = 0 - Subsample = 1 - - OutputDataFile = inversion.output.asc - OutputHeaderFile = inversion.output.hdr - - Columns Begin - SurveyNumber = ${column-surveynumber} - DateNumber = ${column-datenumber} - FlightNumber = ${column-flightnumber} - LineNumber = ${column-linenumber} - FidNumber = ${column-fidnumber} - Easting = ${column-easting} - Northing = ${column-northing} - GroundElevation = ${column-groundelevation} - Altimeter = ${column-altimeter} - - TX_Height = ${column-txheight} - TX_Roll = ${column-txroll} - TX_Pitch = ${column-txpitch} - TX_Yaw = ${column-txyaw} - TXRX_DX = ${column-txrxdx} - TXRX_DY = ${column-txrxdy} - TXRX_DZ = ${column-txrxdz} - RX_Roll = ${column-rxroll} - RX_Pitch = ${column-rxpitch} - RX_Yaw = ${column-rxyaw} - - TotalFieldReconstruction Begin - TXRX_DX = ${column-txrxdx} - TXRX_DY = ${column-txrxdy} - TXRX_DZ = ${column-txrxdz} - TotalFieldReconstruction End - - ReferenceModel Begin - TXRX_DX = ${column-txrxdx} - TXRX_DY = ${column-txrxdy} - TXRX_DZ = ${column-txrxdz} - RX_Pitch = ${column-rxpitch} - Conductivity = 0.01 - Thickness = ${thickness} - ReferenceModel End - - StdDevReferenceModel Begin - TXRX_DX = 1.0 - TXRX_DZ = 1.0 - RX_Pitch = 1.0 - Conductivity = 3.0 - StdDevReferenceModel End - Columns End - InputOutput End -Control End -""" - -stmFileString = """ -System Begin - Name = Tempest - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 0.5 - LoopArea = 1 - BaseFrequency = 25 - WaveFormCurrent Begin - -0.0200000000000 0.0 - -0.0199933333333 1.0 - -0.0000066666667 1.0 - 0.0000000000000 0.0 - 0.0000066666667 -1.0 - 0.0199933333333 
-1.0 - 0.0200000000000 0.0 - WaveFormCurrent End - WaveformDigitisingFrequency = 600000 - Transmitter End - - Receiver Begin - - NumberOfWindows = 15 - WindowWeightingScheme = Boxcar - - WindowTimes Begin - 0.0000066667 0.0000200000 - 0.0000333333 0.0000466667 - 0.0000600000 0.0000733333 - 0.0000866667 0.0001266667 - 0.0001400000 0.0002066667 - 0.0002200000 0.0003400000 - 0.0003533333 0.0005533333 - 0.0005666667 0.0008733333 - 0.0008866667 0.0013533333 - 0.0013666667 0.0021000000 - 0.0021133333 0.0032733333 - 0.0032866667 0.0051133333 - 0.0051266667 0.0079933333 - 0.0080066667 0.0123933333 - 0.0124066667 0.0199933333 - WindowTimes End - - Receiver End - - ForwardModelling Begin - - OutputType = B - - XOutputScaling = 1e15 - YOutputScaling = 1e15 - ZOutputScaling = 1e15 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 5 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) - - -# Write our control files -with open("galeisbs.con", "w") as f: - f.write(controlFileString) -with open("tempest.stm", "w") as f: - f.write(stmFileString) -cloudUpload("galeisbs.con", "galeisbs.con") -cloudUpload("tempest.stm", "tempest.stm") - -# Read the WFS Input data into a CSV format -tree = ET.parse("${wfs-input-xml}"); -root = tree.getroot(); -csvArray=[]; -for featureMembers in root: - for aemsurveys in featureMembers: - row = [] - for field in aemsurveys: - #Non simple properties are ignored - if len(field) > 0: - continue - row.append(field.text) - csvArray.append(row) -with open("aemInput.dat",'w') as f: - writer = csv.writer(f, delimiter=' ', lineterminator='\n') - for row in csvArray: - writer.writerow(row) -cloudUpload("aemInput.dat", "aemInput.dat") - -# Execute AEM Process via MPI -subprocess.call(["mpirun", "-n", "${n-threads}", "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"]) - -# Upload results -inversionFiles = glob.glob('inversion.output.*') -print 'About to upload the following files:' -print inversionFiles -for fn in inversionFiles: - cloudUpload(fn, fn) - -# Concatenate output files for easier parsing -ascFiles = sorted(glob.glob('inversion.output.*.asc')) -with open('inversion.output.asc.combined', 'w') as outfile: - for fname in ascFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.asc.combined', 'inversion.output.asc.combined') -logFiles = sorted(glob.glob('inversion.output.*.log')) -with open('inversion.output.log.combined', 'w') as outfile: - for fname in logFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.log.combined', 
'inversion.output.log.combined') diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-thomson-vtem-galeisbstdem.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-thomson-vtem-galeisbstdem.py deleted file mode 100644 index 80bafb633..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/aem-thomson-vtem-galeisbstdem.py +++ /dev/null @@ -1,4112 +0,0 @@ -#! /usr/bin/python2.7 -import csv, sys, os, subprocess, glob, time, datetime -import xml.etree.ElementTree as ET - -controlFileString = """ -Control Begin - - NumberOfSystems = 1 - - EMSystem1 Begin - SystemFile = VTEMPlusPulseSouthernThomson.stm - UseXComponent = no - UseYComponent = no - UseZComponent = yes - InvertTotalField = no - ReconstructPrimaryFieldFromInputGeometry = no - - EstimateNoiseFromModel = yes - ZMultiplicativeNoise = 3.0 - ZAdditiveNoise = 0.161167 0.124984 0.094229 0.073026 0.058675 0.045173 0.038207 0.026160 0.019157 0.014366 0.011281 0.009552 0.008378 0.007234 0.006111 0.005186 0.004356 0.003580 0.003004 0.002447 0.002047 0.001648 0.001398 0.001154 0.000957 0.000809 0.000687 0.000589 0.000507 0.000444 0.000405 0.000352 0.000328 0.000303 0.000301 0.000319 0.000348 0.000360 0.000344 0.000310 0.000245 0.000196 0.000219 0.000177 0.000159 - ZComponentSecondary = -Column 177 - EMSystem1 End - - Earth Begin - NumberOfLayers = 30 - Earth End - - Options Begin - SolveConductivity = yes - SolveThickness = no - - SolveTX_Height = no - SolveTX_Roll = no - SolveTX_Pitch = no - SolveTX_Yaw = no - SolveTXRX_DX = no - SolveTXRX_DY = no - SolveTXRX_DZ = no - SolveRX_Roll = no - SolveRX_Pitch = no - SolveRX_Yaw = no - - AlphaConductivity = 1.0 - AlphaThickness = 0.0 - AlphaGeometry = 0.0 - AlphaSmoothness = 100000 //Set to 0 for no vertical conductivity smoothing - SmoothnessMethod = Minimise2ndDerivatives - - MinimumPhiD = 1.0 - MinimumPercentageImprovement = 1.0 - MaximumIterations = 100 - Options End - - Input Begin - DataFile = aemInput.dat - HeaderLines = 0 - Subsample = 1 - - Columns Begin - SurveyNumber = Column 1 - DateNumber = Column 6 - FlightNumber = Column 3 - LineNumber = Column 4 - FidNumber = Column 5 - Easting = Column 28 - Northing = Column 29 - GroundElevation = Column 32 - Altimeter = Column 22 - - TX_Height = Column 30 - TX_Roll = Column 23 - TX_Pitch = Column 24 - TX_Yaw = Column 25 - TXRX_DX = 0 - TXRX_DY = 0 - TXRX_DZ = 0 - RX_Roll = Column 23 - RX_Pitch = Column 24 - RX_Yaw = Column 25 - - ReferenceModel Begin - Conductivity = 0.001 - Thickness = 3.00 3.30 3.62 4.00 4.40 4.84 5.32 5.84 6.42 7.08 7.78 8.56 9.42 10.36 11.40 12.54 13.78 15.16 16.68 18.34 20.18 22.20 24.42 26.86 29.54 32.50 35.76 39.32 43.26 - ReferenceModel End - - StdDevReferenceModel Begin - Conductivity = 3.0 - StdDevReferenceModel End - Columns End - Input End - - Output Begin - DataFile = inversion.output.asc - LogFile = inversion.output.log - - PositiveLayerBottomDepths = no - NegativeLayerBottomDepths = yes - InterfaceElevations = no - ParameterSensitivity = no - ParameterUncertainty = no - PredictedData = no - Output End - -Control End -""" - -VTEMPlusPulseSouthernThomsonFileString = """ -System Begin - Name = VTEM-plus-7.3ms-pulse-southernthomson - Type = Time Domain - - Transmitter Begin - NumberOfTurns = 1 - PeakCurrent = 1 - LoopArea = 1 - BaseFrequency = 25 - WaveformDigitisingFrequency = 192000 - WaveFormCurrent Begin - File = VTEMPlusPulseSouthernThomson.cfm - WaveFormCurrent End - Transmitter End - - Receiver Begin - NumberOfWindows = 45 - 
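
All three templates finish the same way: galeisbstdem is launched under mpirun with the templated ${n-threads} rank count, and the per-rank inversion.output.*.asc and *.log files are concatenated before upload. A hedged Python 3 sketch of that tail end; the binary path mirrors the originals and the rank count is a placeholder value, not a verified default.

    # Sketch of the run-and-collect tail shared by the templates (Python 3).
    # The galeisbstdem path mirrors the originals; n_ranks stands in for ${n-threads}.
    import glob
    import subprocess

    n_ranks = "4"  # placeholder for the ${n-threads} template parameter
    subprocess.call(["mpirun", "-n", n_ranks,
                     "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"])

    def concatenate(pattern, combined_name):
        # Join the per-rank outputs in sorted order so rows stay grouped by rank.
        with open(combined_name, "w") as outfile:
            for fname in sorted(glob.glob(pattern)):
                with open(fname) as infile:
                    outfile.write(infile.read())

    concatenate("inversion.output.*.asc", "inversion.output.asc.combined")
    concatenate("inversion.output.*.log", "inversion.output.log.combined")
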
WindowWeightingScheme = LinearTaper - WindowTimes Begin - 0.0000180 0.0000230 - 0.0000230 0.0000290 - 0.0000290 0.0000340 - 0.0000340 0.0000390 - 0.0000390 0.0000450 - 0.0000450 0.0000510 - 0.0000510 0.0000590 - 0.0000590 0.0000680 - 0.0000680 0.0000780 - 0.0000780 0.0000900 - 0.0000900 0.0001030 - 0.0001030 0.0001180 - 0.0001180 0.0001360 - 0.0001360 0.0001560 - 0.0001560 0.0001790 - 0.0001790 0.0002060 - 0.0002060 0.0002360 - 0.0002360 0.0002710 - 0.0002710 0.0003120 - 0.0003120 0.0003580 - 0.0003580 0.0004110 - 0.0004110 0.0004720 - 0.0004720 0.0005430 - 0.0005430 0.0006230 - 0.0006230 0.0007160 - 0.0007160 0.0008230 - 0.0008230 0.0009450 - 0.0009450 0.0010860 - 0.0010860 0.0012470 - 0.0012470 0.0014320 - 0.0014320 0.0016460 - 0.0016460 0.0018910 - 0.0018910 0.0021720 - 0.0021720 0.0024950 - 0.0024950 0.0028650 - 0.0028650 0.0032920 - 0.0032920 0.0037810 - 0.0037810 0.0043410 - 0.0043410 0.0049870 - 0.0049870 0.0057290 - 0.0057290 0.0065810 - 0.0065810 0.0075600 - 0.0075600 0.0086850 - 0.0086850 0.0099770 - 0.0100851 0.0113498 - WindowTimes End - - //Notes - //0.0099770 0.0114580 - real Gate 48 as per VTEM specs - //0.0100851 0.0113498 - symetric altered window to prevent linear taper extending into following half cycle - //0.0099770 0.0112957 = non-symetric altered window to prevent linear taper extending into following half cycle - - Receiver End - - ForwardModelling Begin - - OutputType = dB/dt - - XOutputScaling = 1e12 - YOutputScaling = 1e12 - ZOutputScaling = 1e12 - SecondaryFieldNormalisation = none - - FrequenciesPerDecade = 6 - NumberOfAbsiccaInHankelTransformEvaluation = 21 - - ForwardModelling End - -System End -""" - -VTEMPlusPulseSouthernThomsonCFMFileString = """ --0.007317708275 0.00674948 --0.007312499941 0.00961512 --0.007307291608 0.01303569 --0.007302083275 0.01692413 --0.007296874941 0.02117031 --0.007291666608 0.02566355 --0.007286458275 0.03031034 --0.007281249941 0.03504216 --0.007276041608 0.03981502 --0.007270833275 0.04460405 --0.007265624941 0.04939676 --0.007260416608 0.05418803 --0.007255208275 0.05897480 --0.007249999941 0.06375729 --0.007244791608 0.06853497 --0.007239583275 0.07330836 --0.007234374941 0.07807641 --0.007229166608 0.08283910 --0.007223958275 0.08759752 --0.007218749941 0.09235005 --0.007213541608 0.09709776 --0.007208333275 0.10183958 --0.007203124941 0.10657660 --0.007197916608 0.11130719 --0.007192708275 0.11603296 --0.007187499941 0.12075231 --0.007182291608 0.12546632 --0.007177083275 0.13017443 --0.007171874941 0.13487613 --0.007166666608 0.13957194 --0.007161458275 0.14426187 --0.007156249941 0.14894483 --0.007151041608 0.15362192 --0.007145833275 0.15829204 --0.007140624941 0.16295575 --0.007135416608 0.16761250 --0.007130208275 0.17226229 --0.007124999941 0.17690513 --0.007119791608 0.18154155 --0.007114583275 0.18617047 --0.007109374941 0.19079191 --0.007104166608 0.19540585 --0.007098958275 0.20001231 --0.007093749941 0.20461180 --0.007088541608 0.20920274 --0.007083333275 0.21378618 --0.007078124941 0.21836160 --0.007072916608 0.22292900 --0.007067708275 0.22748890 --0.007062499941 0.23203970 --0.007057291608 0.23658248 --0.007052083275 0.24111670 --0.007046874941 0.24564236 --0.007041666608 0.25015945 --0.007036458275 0.25466745 --0.007031249941 0.25916689 --0.007026041608 0.26365723 --0.007020833275 0.26813794 --0.007015624941 0.27261009 --0.007010416608 0.27707207 --0.007005208275 0.28152550 --0.006999999941 0.28596875 --0.006994791608 0.29040291 --0.006989583275 0.29482637 --0.006984374941 0.29924073 --0.006979166608 
0.30364439 --0.006973958275 0.30803842 --0.006968749941 0.31242228 --0.006963541608 0.31679544 --0.006958333275 0.32115844 --0.006953124941 0.32551126 --0.006947916608 0.32985285 --0.006942708275 0.33418374 --0.006937499941 0.33850393 --0.006932291608 0.34281288 --0.006927083275 0.34711113 --0.006921874941 0.35139815 --0.006916666608 0.35567286 --0.006911458275 0.35993526 --0.006906249941 0.36418214 --0.006901041608 0.36840494 --0.006895833275 0.37258869 --0.006890624941 0.37670341 --0.006885416608 0.38069881 --0.006880208275 0.38450211 --0.006874999941 0.38802023 --0.006869791608 0.39115255 --0.006864583275 0.39380919 --0.006859374941 0.39593130 --0.006854166608 0.39750709 --0.006848958275 0.39857403 --0.006843749941 0.39921237 --0.006838541608 0.39952111 --0.006833333275 0.39960083 --0.006828124941 0.39953770 --0.006822916608 0.39939269 --0.006817708275 0.39920702 --0.006812499941 0.39900316 --0.006807291608 0.39879234 --0.006802083275 0.39857884 --0.006796874941 0.39836535 --0.006791666608 0.39815292 --0.006786458275 0.39793996 --0.006781249941 0.39772808 --0.006776041608 0.39751619 --0.006770833275 0.39730483 --0.006765624941 0.39709401 --0.006760416608 0.39688373 --0.006755208275 0.39667344 --0.006749999941 0.39646316 --0.006744791608 0.39625341 --0.006739583275 0.39604420 --0.006734374941 0.39583552 --0.006729166608 0.39562630 --0.006723958275 0.39541763 --0.006718749941 0.39520895 --0.006713541608 0.39500080 --0.006708333275 0.39479266 --0.006703124941 0.39458505 --0.006697916608 0.39437744 --0.006692708275 0.39416983 --0.006687499941 0.39396222 --0.006682291608 0.39375515 --0.006677083275 0.39354808 --0.006671874941 0.39334100 --0.006666666608 0.39313393 --0.006661458275 0.39292739 --0.006656249941 0.39272085 --0.006651041608 0.39251431 --0.006645833275 0.39230777 --0.006640624941 0.39210177 --0.006635416608 0.39189523 --0.006630208275 0.39168923 --0.006624999941 0.39148323 --0.006619791608 0.39127722 --0.006614583275 0.39107122 --0.006609374941 0.39086575 --0.006604166608 0.39066028 --0.006598958275 0.39045428 --0.006593749941 0.39024881 --0.006588541608 0.39004334 --0.006583333275 0.38983841 --0.006578124941 0.38963294 --0.006572916608 0.38942801 --0.006567708275 0.38922307 --0.006562499941 0.38901760 --0.006557291608 0.38881267 --0.006552083275 0.38860827 --0.006546874941 0.38840387 --0.006541666608 0.38819948 --0.006536458275 0.38799722 --0.006531249941 0.38779924 --0.006526041608 0.38761357 --0.006520833275 0.38745519 --0.006515624941 0.38735459 --0.006510416608 0.38736208 --0.006505208275 0.38754936 --0.006499999941 0.38801006 --0.006494791608 0.38884317 --0.006489583275 0.39013965 --0.006484374941 0.39195730 --0.006479166608 0.39430842 --0.006473958275 0.39715394 --0.006468749941 0.40041629 --0.006463541608 0.40399647 --0.006458333275 0.40779496 --0.006453124941 0.41172615 --0.006447916608 0.41572797 --0.006442708275 0.41975868 --0.006437499941 0.42379475 --0.006432291608 0.42782385 --0.006427083275 0.43184012 --0.006421874941 0.43584194 --0.006416666608 0.43982985 --0.006411458275 0.44380438 --0.006406249941 0.44776607 --0.006401041608 0.45171491 --0.006395833275 0.45565145 --0.006390624941 0.45957515 --0.006385416608 0.46348440 --0.006380208275 0.46737867 --0.006374999941 0.47125742 --0.006369791608 0.47512066 --0.006364583275 0.47896891 --0.006359374941 0.48280272 --0.006354166608 0.48662261 --0.006348958275 0.49042913 --0.006343749941 0.49422280 --0.006338541608 0.49800150 --0.006333333275 0.50176628 --0.006328124941 0.50551501 --0.006322916608 0.50924769 
--0.006317708275 0.51296431 --0.006312499941 0.51666542 --0.006307291608 0.52035154 --0.006302083275 0.52402269 --0.006296874941 0.52767938 --0.006291666608 0.53132163 --0.006286458275 0.53494890 --0.006281249941 0.53856279 --0.006276041608 0.54215849 --0.006270833275 0.54573278 --0.006265624941 0.54929638 --0.006260416608 0.55284392 --0.006255208275 0.55637541 --0.006249999941 0.55988549 --0.006244791608 0.56338488 --0.006239583275 0.56686821 --0.006234374941 0.57033549 --0.006229166608 0.57378672 --0.006223958275 0.57722190 --0.006218749941 0.58064102 --0.006213541608 0.58403874 --0.006208333275 0.58742576 --0.006203124941 0.59079137 --0.006197916608 0.59414094 --0.006192708275 0.59747445 --0.006187499941 0.60079191 --0.006182291608 0.60408797 --0.006177083275 0.60737332 --0.006171874941 0.61063727 --0.006166666608 0.61388517 --0.006161458275 0.61711702 --0.006156249941 0.62032747 --0.006151041608 0.62352186 --0.006145833275 0.62670020 --0.006140624941 0.62986249 --0.006135416608 0.63300337 --0.006130208275 0.63612820 --0.006124999941 0.63923698 --0.006119791608 0.64232436 --0.006114583275 0.64539569 --0.006109374941 0.64844561 --0.006104166608 0.65147948 --0.006098958275 0.65449730 --0.006093749941 0.65749371 --0.006088541608 0.66047408 --0.006083333275 0.66343304 --0.006078124941 0.66637594 --0.006072916608 0.66929745 --0.006067708275 0.67218685 --0.006062499941 0.67504414 --0.006057291608 0.67783723 --0.006052083275 0.68053935 --0.006046874941 0.68309166 --0.006041666608 0.68542993 --0.006036458275 0.68748997 --0.006031249941 0.68920220 --0.006026041608 0.69052919 --0.006020833275 0.69146022 --0.006015624941 0.69203275 --0.006010416608 0.69228958 --0.006005208275 0.69231634 --0.005999999941 0.69218257 --0.005994791608 0.69194178 --0.005989583275 0.69164749 --0.005984374941 0.69132110 --0.005979166608 0.69097865 --0.005973958275 0.69063620 --0.005968749941 0.69029376 --0.005963541608 0.68994596 --0.005958333275 0.68960351 --0.005953124941 0.68925571 --0.005947916608 0.68891326 --0.005942708275 0.68857082 --0.005937499941 0.68822837 --0.005932291608 0.68788592 --0.005927083275 0.68754347 --0.005921874941 0.68720103 --0.005916666608 0.68685858 --0.005911458275 0.68651613 --0.005906249941 0.68617369 --0.005901041608 0.68583659 --0.005895833275 0.68549414 --0.005890624941 0.68515169 --0.005885416608 0.68481460 --0.005880208275 0.68447215 --0.005874999941 0.68413505 --0.005869791608 0.68379261 --0.005864583275 0.68345551 --0.005859374941 0.68311841 --0.005854166608 0.68277596 --0.005848958275 0.68243887 --0.005843749941 0.68210177 --0.005838541608 0.68175932 --0.005833333275 0.68142223 --0.005828124941 0.68108513 --0.005822916608 0.68074803 --0.005817708275 0.68041094 --0.005812499941 0.68007384 --0.005807291608 0.67973674 --0.005802083275 0.67939965 --0.005796874941 0.67906255 --0.005791666608 0.67872545 --0.005786458275 0.67838836 --0.005781249941 0.67805126 --0.005776041608 0.67771416 --0.005770833275 0.67737707 --0.005765624941 0.67703997 --0.005760416608 0.67670822 --0.005755208275 0.67637113 --0.005749999941 0.67603403 --0.005744791608 0.67569693 --0.005739583275 0.67536519 --0.005734374941 0.67502809 --0.005729166608 0.67469099 --0.005723958275 0.67435925 --0.005718749941 0.67402215 --0.005713541608 0.67369041 --0.005708333275 0.67335331 --0.005703124941 0.67302156 --0.005697916608 0.67268982 --0.005692708275 0.67236877 --0.005687499941 0.67206913 --0.005682291608 0.67180695 --0.005677083275 0.67162502 --0.005671874941 0.67156616 --0.005666666608 0.67169458 --0.005661458275 
0.67209053 --0.005656249941 0.67280753 --0.005651041608 0.67389373 --0.005645833275 0.67534914 --0.005640624941 0.67715234 --0.005635416608 0.67923912 --0.005630208275 0.68155064 --0.005624999941 0.68401199 --0.005619791608 0.68655894 --0.005614583275 0.68914870 --0.005609374941 0.69174916 --0.005604166608 0.69434427 --0.005598958275 0.69692867 --0.005593749941 0.69948633 --0.005588541608 0.70202258 --0.005583333275 0.70454278 --0.005578124941 0.70703622 --0.005572916608 0.70951897 --0.005567708275 0.71198031 --0.005562499941 0.71442560 --0.005557291608 0.71685483 --0.005552083275 0.71926267 --0.005546874941 0.72164910 --0.005541666608 0.72401413 --0.005536458275 0.72635775 --0.005531249941 0.72867462 --0.005526041608 0.73098079 --0.005520833275 0.73326021 --0.005515624941 0.73552357 --0.005510416608 0.73777088 --0.005505208275 0.74000214 --0.005499999941 0.74220665 --0.005494791608 0.74439510 --0.005489583275 0.74655680 --0.005484374941 0.74870245 --0.005479166608 0.75082134 --0.005473958275 0.75292418 --0.005468749941 0.75500562 --0.005463541608 0.75706565 --0.005458333275 0.75910964 --0.005453124941 0.76113757 --0.005447916608 0.76313874 --0.005442708275 0.76512387 --0.005437499941 0.76708224 --0.005432291608 0.76902456 --0.005427083275 0.77094013 --0.005421874941 0.77283429 --0.005416666608 0.77471240 --0.005411458275 0.77656910 --0.005406249941 0.77840976 --0.005401041608 0.78022366 --0.005395833275 0.78202151 --0.005390624941 0.78379796 --0.005385416608 0.78555300 --0.005380208275 0.78728129 --0.005374999941 0.78899353 --0.005369791608 0.79067901 --0.005364583275 0.79234844 --0.005359374941 0.79399647 --0.005354166608 0.79562309 --0.005348958275 0.79723367 --0.005343749941 0.79881749 --0.005338541608 0.80038525 --0.005333333275 0.80192627 --0.005328124941 0.80344588 --0.005322916608 0.80494408 --0.005317708275 0.80642089 --0.005312499941 0.80788164 --0.005307291608 0.80931564 --0.005302083275 0.81072824 --0.005296874941 0.81212478 --0.005291666608 0.81349992 --0.005286458275 0.81484831 --0.005281249941 0.81617529 --0.005276041608 0.81748622 --0.005270833275 0.81877040 --0.005265624941 0.82003317 --0.005260416608 0.82127455 --0.005255208275 0.82249452 --0.005249999941 0.82369843 --0.005244791608 0.82487560 --0.005239583275 0.82603136 --0.005234374941 0.82716036 --0.005229166608 0.82825726 --0.005223958275 0.82931671 --0.005218749941 0.83032265 --0.005213541608 0.83124833 --0.005208333275 0.83206699 --0.005203124941 0.83275189 --0.005197916608 0.83327626 --0.005192708275 0.83361871 --0.005187499941 0.83378458 --0.005182291608 0.83378993 --0.005177083275 0.83365081 --0.005171874941 0.83341003 --0.005166666608 0.83309433 --0.005161458275 0.83273048 --0.005156249941 0.83234523 --0.005151041608 0.83194392 --0.005145833275 0.83154262 --0.005140624941 0.83113596 --0.005135416608 0.83072931 --0.005130208275 0.83032265 --0.005124999941 0.82991599 --0.005119791608 0.82950934 --0.005114583275 0.82910268 --0.005109374941 0.82869602 --0.005104166608 0.82828937 --0.005098958275 0.82788271 --0.005093749941 0.82747606 --0.005088541608 0.82707475 --0.005083333275 0.82666809 --0.005078124941 0.82626679 --0.005072916608 0.82586013 --0.005067708275 0.82545883 --0.005062499941 0.82505217 --0.005057291608 0.82465086 --0.005052083275 0.82424956 --0.005046874941 0.82384290 --0.005041666608 0.82344160 --0.005036458275 0.82304029 --0.005031249941 0.82263899 --0.005026041608 0.82223233 --0.005020833275 0.82183102 --0.005015624941 0.82142972 --0.005010416608 0.82102841 --0.005005208275 0.82062711 
--0.004999999941 0.82022580 --0.004994791608 0.81982450 --0.004989583275 0.81942319 --0.004984374941 0.81902724 --0.004979166608 0.81862593 --0.004973958275 0.81822462 --0.004968749941 0.81782332 --0.004963541608 0.81742736 --0.004958333275 0.81702606 --0.004953124941 0.81662475 --0.004947916608 0.81622880 --0.004942708275 0.81582749 --0.004937499941 0.81542619 --0.004932291608 0.81503023 --0.004927083275 0.81462893 --0.004921874941 0.81423297 --0.004916666608 0.81383702 --0.004911458275 0.81343571 --0.004906249941 0.81303976 --0.004901041608 0.81264380 --0.004895833275 0.81224250 --0.004890624941 0.81184654 --0.004885416608 0.81145059 --0.004880208275 0.81105463 --0.004874999941 0.81065868 --0.004869791608 0.81026272 --0.004864583275 0.80986677 --0.004859374941 0.80947616 --0.004854166608 0.80909091 --0.004848958275 0.80872706 --0.004843749941 0.80839531 --0.004838541608 0.80811707 --0.004833333275 0.80792445 --0.004828124941 0.80783884 --0.004822916608 0.80789234 --0.004817708275 0.80809567 --0.004812499941 0.80845952 --0.004807291608 0.80897319 --0.004802083275 0.80960458 --0.004796874941 0.81033228 --0.004791666608 0.81112419 --0.004786458275 0.81195356 --0.004781249941 0.81280432 --0.004776041608 0.81367114 --0.004770833275 0.81453796 --0.004765624941 0.81540478 --0.004760416608 0.81627695 --0.004755208275 0.81714913 --0.004749999941 0.81802130 --0.004744791608 0.81890417 --0.004739583275 0.81979774 --0.004734374941 0.82069667 --0.004729166608 0.82160629 --0.004723958275 0.82251592 --0.004718749941 0.82342554 --0.004713541608 0.82433517 --0.004708333275 0.82523945 --0.004703124941 0.82613837 --0.004697916608 0.82702659 --0.004692708275 0.82791482 --0.004687499941 0.82879769 --0.004682291608 0.82968056 --0.004677083275 0.83055808 --0.004671874941 0.83143560 --0.004666666608 0.83231313 --0.004661458275 0.83318530 --0.004656249941 0.83406282 --0.004651041608 0.83493499 --0.004645833275 0.83580716 --0.004640624941 0.83667933 --0.004635416608 0.83755685 --0.004630208275 0.83843972 --0.004624999941 0.83932260 --0.004619791608 0.84021082 --0.004614583275 0.84109904 --0.004609374941 0.84198191 --0.004604166608 0.84286479 --0.004598958275 0.84374231 --0.004593749941 0.84461983 --0.004588541608 0.84549735 --0.004583333275 0.84636952 --0.004578124941 0.84724169 --0.004572916608 0.84811386 --0.004567708275 0.84898603 --0.004562499941 0.84985285 --0.004557291608 0.85072503 --0.004552083275 0.85159185 --0.004546874941 0.85245867 --0.004541666608 0.85332549 --0.004536458275 0.85419231 --0.004531249941 0.85505913 --0.004526041608 0.85592595 --0.004520833275 0.85679277 --0.004515624941 0.85766494 --0.004510416608 0.85853711 --0.004505208275 0.85940393 --0.004499999941 0.86027075 --0.004494791608 0.86113757 --0.004489583275 0.86200439 --0.004484374941 0.86286586 --0.004479166608 0.86372733 --0.004473958275 0.86458880 --0.004468749941 0.86545562 --0.004463541608 0.86631708 --0.004458333275 0.86717320 --0.004453124941 0.86803467 --0.004447916608 0.86889614 --0.004442708275 0.86975226 --0.004437499941 0.87060838 --0.004432291608 0.87146450 --0.004427083275 0.87232062 --0.004421874941 0.87317673 --0.004416666608 0.87403285 --0.004411458275 0.87489432 --0.004406249941 0.87574509 --0.004401041608 0.87659586 --0.004395833275 0.87744128 --0.004390624941 0.87826529 --0.004385416608 0.87905185 --0.004380208275 0.87979025 --0.004374999941 0.88044304 --0.004369791608 0.88098882 --0.004364583275 0.88139012 --0.004359374941 0.88164161 --0.004354166608 0.88173257 --0.004348958275 0.88167371 --0.004343749941 
0.88149179 --0.004338541608 0.88121355 --0.004333333275 0.88087110 --0.004328124941 0.88048585 --0.004322916608 0.88007384 --0.004317708275 0.87965648 --0.004312499941 0.87922842 --0.004307291608 0.87880036 --0.004302083275 0.87837230 --0.004296874941 0.87794425 --0.004291666608 0.87751619 --0.004286458275 0.87708813 --0.004281249941 0.87666007 --0.004276041608 0.87623201 --0.004270833275 0.87580395 --0.004265624941 0.87538124 --0.004260416608 0.87495318 --0.004255208275 0.87452512 --0.004249999941 0.87410241 --0.004244791608 0.87367435 --0.004239583275 0.87325165 --0.004234374941 0.87282359 --0.004229166608 0.87240088 --0.004223958275 0.87197282 --0.004218749941 0.87155011 --0.004213541608 0.87112740 --0.004208333275 0.87070469 --0.004203124941 0.87027663 --0.004197916608 0.86985392 --0.004192708275 0.86943122 --0.004187499941 0.86900851 --0.004182291608 0.86858580 --0.004177083275 0.86816309 --0.004171874941 0.86774038 --0.004166666608 0.86731767 --0.004161458275 0.86689496 --0.004156249941 0.86647226 --0.004151041608 0.86605490 --0.004145833275 0.86563219 --0.004140624941 0.86520948 --0.004135416608 0.86478677 --0.004130208275 0.86436942 --0.004124999941 0.86394671 --0.004119791608 0.86352400 --0.004114583275 0.86310664 --0.004109374941 0.86268393 --0.004104166608 0.86226657 --0.004098958275 0.86184387 --0.004093749941 0.86142651 --0.004088541608 0.86100380 --0.004083333275 0.86058644 --0.004078124941 0.86016908 --0.004072916608 0.85974637 --0.004067708275 0.85932902 --0.004062499941 0.85891166 --0.004057291608 0.85849430 --0.004052083275 0.85807694 --0.004046874941 0.85765424 --0.004041666608 0.85723688 --0.004036458275 0.85682487 --0.004031249941 0.85640751 --0.004026041608 0.85599551 --0.004020833275 0.85559420 --0.004015624941 0.85520360 --0.004010416608 0.85485045 --0.004005208275 0.85455081 --0.003999999941 0.85433142 --0.003994791608 0.85421906 --0.003989583275 0.85424046 --0.003984374941 0.85441704 --0.003979166608 0.85474878 --0.003973958275 0.85521965 --0.003968749941 0.85581893 --0.003963541608 0.85650383 --0.003958333275 0.85725828 --0.003953124941 0.85805019 --0.003947916608 0.85886885 --0.003942708275 0.85969287 --0.003937499941 0.86052758 --0.003932291608 0.86136230 --0.003927083275 0.86220237 --0.003921874941 0.86304243 --0.003916666608 0.86388250 --0.003911458275 0.86473862 --0.003906249941 0.86560009 --0.003901041608 0.86646691 --0.003895833275 0.86734443 --0.003890624941 0.86822195 --0.003885416608 0.86910482 --0.003880208275 0.86997699 --0.003874999941 0.87084916 --0.003869791608 0.87171063 --0.003864583275 0.87256675 --0.003859374941 0.87341752 --0.003854166608 0.87426294 --0.003848958275 0.87510835 --0.003843749941 0.87595377 --0.003838541608 0.87679919 --0.003833333275 0.87763925 --0.003828124941 0.87847932 --0.003822916608 0.87931939 --0.003817708275 0.88015945 --0.003812499941 0.88099952 --0.003807291608 0.88184494 --0.003802083275 0.88268500 --0.003796874941 0.88353577 --0.003791666608 0.88439189 --0.003786458275 0.88524266 --0.003781249941 0.88609877 --0.003776041608 0.88694954 --0.003770833275 0.88780031 --0.003765624941 0.88864573 --0.003760416608 0.88948579 --0.003755208275 0.89032586 --0.003749999941 0.89116593 --0.003744791608 0.89200599 --0.003739583275 0.89284606 --0.003734374941 0.89368077 --0.003729166608 0.89452084 --0.003723958275 0.89535556 --0.003718749941 0.89619027 --0.003713541608 0.89702499 --0.003708333275 0.89785435 --0.003703124941 0.89868907 --0.003697916608 0.89952378 --0.003692708275 0.90035850 --0.003687499941 0.90119322 
--0.003682291608 0.90203328 --0.003677083275 0.90286800 --0.003671874941 0.90370806 --0.003666666608 0.90454278 --0.003661458275 0.90537749 --0.003656249941 0.90620686 --0.003651041608 0.90703622 --0.003645833275 0.90786559 --0.003640624941 0.90869495 --0.003635416608 0.90952432 --0.003630208275 0.91035368 --0.003624999941 0.91118305 --0.003619791608 0.91200706 --0.003614583275 0.91283643 --0.003609374941 0.91366044 --0.003604166608 0.91448446 --0.003598958275 0.91530847 --0.003593749941 0.91613248 --0.003588541608 0.91695650 --0.003583333275 0.91778051 --0.003578124941 0.91860453 --0.003572916608 0.91942854 --0.003567708275 0.92024720 --0.003562499941 0.92105517 --0.003557291608 0.92184708 --0.003552083275 0.92260688 --0.003546874941 0.92330783 --0.003541666608 0.92393386 --0.003536458275 0.92444754 --0.003531249941 0.92482209 --0.003526041608 0.92504682 --0.003520833275 0.92511103 --0.003515624941 0.92503077 --0.003510416608 0.92482744 --0.003505208275 0.92452780 --0.003499999941 0.92416395 --0.003494791608 0.92375729 --0.003489583275 0.92332923 --0.003484374941 0.92289047 --0.003479166608 0.92244101 --0.003473958275 0.92199690 --0.003468749941 0.92154743 --0.003463541608 0.92109797 --0.003458333275 0.92065386 --0.003453124941 0.92020440 --0.003447916608 0.91975494 --0.003442708275 0.91931082 --0.003437499941 0.91886136 --0.003432291608 0.91841725 --0.003427083275 0.91797314 --0.003421874941 0.91752368 --0.003416666608 0.91707957 --0.003411458275 0.91663545 --0.003406249941 0.91619134 --0.003401041608 0.91574723 --0.003395833275 0.91530312 --0.003390624941 0.91485901 --0.003385416608 0.91441490 --0.003380208275 0.91397078 --0.003374999941 0.91352667 --0.003369791608 0.91308256 --0.003364583275 0.91263845 --0.003359374941 0.91219969 --0.003354166608 0.91175558 --0.003348958275 0.91131147 --0.003343749941 0.91087271 --0.003338541608 0.91042859 --0.003333333275 0.90998983 --0.003328124941 0.90954572 --0.003322916608 0.90910696 --0.003317708275 0.90866285 --0.003312499941 0.90822409 --0.003307291608 0.90777998 --0.003302083275 0.90734122 --0.003296874941 0.90690246 --0.003291666608 0.90646370 --0.003286458275 0.90601958 --0.003281249941 0.90558082 --0.003276041608 0.90514206 --0.003270833275 0.90470330 --0.003265624941 0.90426454 --0.003260416608 0.90382578 --0.003255208275 0.90338702 --0.003249999941 0.90294826 --0.003244791608 0.90250950 --0.003239583275 0.90207074 --0.003234374941 0.90163198 --0.003229166608 0.90119857 --0.003223958275 0.90075981 --0.003218749941 0.90032104 --0.003213541608 0.89988228 --0.003208333275 0.89944887 --0.003203124941 0.89901011 --0.003197916608 0.89857670 --0.003192708275 0.89814864 --0.003187499941 0.89772594 --0.003182291608 0.89731928 --0.003177083275 0.89694473 --0.003171874941 0.89662368 --0.003166666608 0.89638290 --0.003161458275 0.89624913 --0.003156249941 0.89624913 --0.003151041608 0.89639895 --0.003145833275 0.89669859 --0.003140624941 0.89714806 --0.003135416608 0.89770988 --0.003130208275 0.89836802 --0.003124999941 0.89908502 --0.003119791608 0.89985018 --0.003114583275 0.90063139 --0.003109374941 0.90142865 --0.003104166608 0.90223126 --0.003098958275 0.90303387 --0.003093749941 0.90383648 --0.003088541608 0.90464444 --0.003083333275 0.90545776 --0.003078124941 0.90627642 --0.003072916608 0.90710578 --0.003067708275 0.90794585 --0.003062499941 0.90879127 --0.003057291608 0.90964204 --0.003052083275 0.91049280 --0.003046874941 0.91133822 --0.003041666608 0.91217829 --0.003036458275 0.91300765 --0.003031249941 0.91383167 --0.003026041608 
0.91465033 --0.003020833275 0.91546899 --0.003015624941 0.91628231 --0.003010416608 0.91709562 --0.003005208275 0.91790893 --0.002999999941 0.91871689 --0.002994791608 0.91953020 --0.002989583275 0.92033817 --0.002984374941 0.92114613 --0.002979166608 0.92195409 --0.002973958275 0.92276205 --0.002968749941 0.92358072 --0.002963541608 0.92439938 --0.002958333275 0.92521804 --0.002953124941 0.92604206 --0.002947916608 0.92686607 --0.002942708275 0.92769009 --0.002937499941 0.92850875 --0.002932291608 0.92932206 --0.002927083275 0.93013537 --0.002921874941 0.93094334 --0.002916666608 0.93175130 --0.002911458275 0.93255926 --0.002906249941 0.93336722 --0.002901041608 0.93417518 --0.002895833275 0.93498315 --0.002890624941 0.93578576 --0.002885416608 0.93659372 --0.002880208275 0.93739633 --0.002874999941 0.93819894 --0.002869791608 0.93900155 --0.002864583275 0.93980416 --0.002859374941 0.94060677 --0.002854166608 0.94141474 --0.002848958275 0.94222270 --0.002843749941 0.94303066 --0.002838541608 0.94383862 --0.002833333275 0.94464123 --0.002828124941 0.94544384 --0.002822916608 0.94624646 --0.002817708275 0.94704907 --0.002812499941 0.94784633 --0.002807291608 0.94864359 --0.002802083275 0.94944085 --0.002796874941 0.95024346 --0.002791666608 0.95104072 --0.002786458275 0.95183798 --0.002781249941 0.95262989 --0.002776041608 0.95342715 --0.002770833275 0.95421906 --0.002765624941 0.95501632 --0.002760416608 0.95580823 --0.002755208275 0.95660549 --0.002749999941 0.95739740 --0.002744791608 0.95819466 --0.002739583275 0.95898657 --0.002734374941 0.95977313 --0.002729166608 0.96055434 --0.002723958275 0.96131414 --0.002718749941 0.96204719 --0.002713541608 0.96272139 --0.002708333275 0.96331532 --0.002703124941 0.96380224 --0.002697916608 0.96415539 --0.002692708275 0.96435336 --0.002687499941 0.96439617 --0.002682291608 0.96429450 --0.002677083275 0.96406977 --0.002671874941 0.96375408 --0.002666666608 0.96336883 --0.002661458275 0.96294612 --0.002656249941 0.96249666 --0.002651041608 0.96204184 --0.002645833275 0.96157633 --0.002640624941 0.96111081 --0.002635416608 0.96064530 --0.002630208275 0.96017978 --0.002624999941 0.95971427 --0.002619791608 0.95924876 --0.002614583275 0.95878324 --0.002609374941 0.95832308 --0.002604166608 0.95785756 --0.002598958275 0.95739205 --0.002593749941 0.95692653 --0.002588541608 0.95646637 --0.002583333275 0.95600086 --0.002578124941 0.95554069 --0.002572916608 0.95507518 --0.002567708275 0.95461501 --0.002562499941 0.95415485 --0.002557291608 0.95368934 --0.002552083275 0.95322917 --0.002546874941 0.95276901 --0.002541666608 0.95230884 --0.002536458275 0.95184868 --0.002531249941 0.95138852 --0.002526041608 0.95092835 --0.002520833275 0.95046819 --0.002515624941 0.95000803 --0.002510416608 0.94954786 --0.002505208275 0.94908770 --0.002499999941 0.94862753 --0.002494791608 0.94816737 --0.002489583275 0.94771256 --0.002484374941 0.94725239 --0.002479166608 0.94679223 --0.002473958275 0.94633742 --0.002468749941 0.94587725 --0.002463541608 0.94542244 --0.002458333275 0.94496228 --0.002453124941 0.94450746 --0.002447916608 0.94404730 --0.002442708275 0.94359249 --0.002437499941 0.94313767 --0.002432291608 0.94267751 --0.002427083275 0.94222270 --0.002421874941 0.94176788 --0.002416666608 0.94130772 --0.002411458275 0.94085291 --0.002406249941 0.94039810 --0.002401041608 0.93994328 --0.002395833275 0.93948847 --0.002390624941 0.93903366 --0.002385416608 0.93857884 --0.002380208275 0.93812403 --0.002374999941 0.93766922 --0.002369791608 0.93721975 
--0.002364583275 0.93676494 --0.002359374941 0.93631548 --0.002354166608 0.93587672 --0.002348958275 0.93545401 --0.002343749941 0.93506341 --0.002338541608 0.93472631 --0.002333333275 0.93446412 --0.002328124941 0.93430895 --0.002322916608 0.93428755 --0.002317708275 0.93441597 --0.002312499941 0.93469421 --0.002307291608 0.93511156 --0.002302083275 0.93564664 --0.002296874941 0.93627267 --0.002291666608 0.93696827 --0.002286458275 0.93769597 --0.002281249941 0.93845578 --0.002276041608 0.93922093 --0.002270833275 0.93999144 --0.002265624941 0.94076730 --0.002260416608 0.94154315 --0.002255208275 0.94231901 --0.002249999941 0.94310557 --0.002244791608 0.94389748 --0.002239583275 0.94470009 --0.002234374941 0.94551340 --0.002229166608 0.94633207 --0.002223958275 0.94715608 --0.002218749941 0.94798010 --0.002213541608 0.94879876 --0.002208333275 0.94960672 --0.002203124941 0.95041468 --0.002197916608 0.95120659 --0.002192708275 0.95199850 --0.002187499941 0.95278506 --0.002182291608 0.95357162 --0.002177083275 0.95435818 --0.002171874941 0.95513939 --0.002166666608 0.95592595 --0.002161458275 0.95670715 --0.002156249941 0.95748301 --0.002151041608 0.95826422 --0.002145833275 0.95904543 --0.002140624941 0.95983199 --0.002135416608 0.96061855 --0.002130208275 0.96141046 --0.002124999941 0.96220237 --0.002119791608 0.96299963 --0.002114583275 0.96379689 --0.002109374941 0.96459415 --0.002104166608 0.96538606 --0.002098958275 0.96617261 --0.002093749941 0.96695917 --0.002088541608 0.96774038 --0.002083333275 0.96852159 --0.002078124941 0.96930280 --0.002072916608 0.97007866 --0.002067708275 0.97085986 --0.002062499941 0.97164107 --0.002057291608 0.97241693 --0.002052083275 0.97319279 --0.002046874941 0.97396864 --0.002041666608 0.97474450 --0.002036458275 0.97552036 --0.002031249941 0.97629622 --0.002026041608 0.97707207 --0.002020833275 0.97785328 --0.002015624941 0.97863449 --0.002010416608 0.97941570 --0.002005208275 0.98019691 --0.001999999941 0.98097276 --0.001994791608 0.98174862 --0.001989583275 0.98252448 --0.001984374941 0.98329499 --0.001979166608 0.98407084 --0.001973958275 0.98484135 --0.001968749941 0.98561186 --0.001963541608 0.98638236 --0.001958333275 0.98715287 --0.001953124941 0.98792338 --0.001947916608 0.98869388 --0.001942708275 0.98945904 --0.001937499941 0.99022955 --0.001932291608 0.99099470 --0.001927083275 0.99176521 --0.001921874941 0.99253037 --0.001916666608 0.99330087 --0.001911458275 0.99406603 --0.001906249941 0.99483118 --0.001901041608 0.99559634 --0.001895833275 0.99635079 --0.001890624941 0.99708385 --0.001885416608 0.99779014 --0.001880208275 0.99843758 --0.001874999941 0.99901011 --0.001869791608 0.99947028 --0.001864583275 0.99979667 --0.001859374941 0.99997325 --0.001854166608 1.00000000 --0.001848958275 0.99987693 --0.001843749941 0.99963615 --0.001838541608 0.99929905 --0.001833333275 0.99889775 --0.001828124941 0.99845899 --0.001822916608 0.99799347 --0.001817708275 0.99752261 --0.001812499941 0.99704104 --0.001807291608 0.99655947 --0.001802083275 0.99607791 --0.001796874941 0.99559634 --0.001791666608 0.99511477 --0.001786458275 0.99463321 --0.001781249941 0.99415164 --0.001776041608 0.99367542 --0.001770833275 0.99319386 --0.001765624941 0.99271229 --0.001760416608 0.99223607 --0.001755208275 0.99175451 --0.001749999941 0.99127294 --0.001744791608 0.99079673 --0.001739583275 0.99032051 --0.001734374941 0.98983894 --0.001729166608 0.98936273 --0.001723958275 0.98888651 --0.001718749941 0.98840494 --0.001713541608 0.98792873 --0.001708333275 
0.98745251 --0.001703124941 0.98697630 --0.001697916608 0.98650008 --0.001692708275 0.98602386 --0.001687499941 0.98554765 --0.001682291608 0.98507143 --0.001677083275 0.98459522 --0.001671874941 0.98412435 --0.001666666608 0.98364814 --0.001661458275 0.98317192 --0.001656249941 0.98269570 --0.001651041608 0.98222484 --0.001645833275 0.98174862 --0.001640624941 0.98127776 --0.001635416608 0.98080154 --0.001630208275 0.98032533 --0.001624999941 0.97985446 --0.001619791608 0.97938359 --0.001614583275 0.97890738 --0.001609374941 0.97843651 --0.001604166608 0.97796565 --0.001598958275 0.97748943 --0.001593749941 0.97701857 --0.001588541608 0.97654770 --0.001583333275 0.97607684 --0.001578124941 0.97560597 --0.001572916608 0.97513511 --0.001567708275 0.97466424 --0.001562499941 0.97419338 --0.001557291608 0.97372251 --0.001552083275 0.97325165 --0.001546874941 0.97278078 --0.001541666608 0.97230991 --0.001536458275 0.97184440 --0.001531249941 0.97137354 --0.001526041608 0.97090802 --0.001520833275 0.97045321 --0.001515624941 0.97001445 --0.001510416608 0.96960779 --0.001505208275 0.96924929 --0.001499999941 0.96897105 --0.001494791608 0.96879983 --0.001489583275 0.96876237 --0.001484374941 0.96886404 --0.001479166608 0.96912087 --0.001473958275 0.96951683 --0.001468749941 0.97003050 --0.001463541608 0.97062978 --0.001458333275 0.97129862 --0.001453124941 0.97200492 --0.001447916608 0.97273262 --0.001442708275 0.97347638 --0.001437499941 0.97422013 --0.001432291608 0.97496923 --0.001427083275 0.97572369 --0.001421874941 0.97647814 --0.001416666608 0.97723795 --0.001411458275 0.97800845 --0.001406249941 0.97878431 --0.001401041608 0.97957622 --0.001395833275 0.98036813 --0.001390624941 0.98117074 --0.001385416608 0.98196800 --0.001380208275 0.98276526 --0.001374999941 0.98355182 --0.001369791608 0.98432768 --0.001364583275 0.98509284 --0.001359374941 0.98584729 --0.001354166608 0.98658034 --0.001348958275 0.98727594 --0.001343749941 0.98790733 --0.001338541608 0.98843170 --0.001333333275 0.98878485 --0.001328124941 0.98890256 --0.001322916608 0.98874204 --0.001317708275 0.98824977 --0.001312499941 0.98742576 --0.001307291608 0.98629140 --0.001302083275 0.98488951 --0.001296874941 0.98328964 --0.001291666608 0.98154529 --0.001286458275 0.97970464 --0.001281249941 0.97780513 --0.001276041608 0.97585746 --0.001270833275 0.97388303 --0.001265624941 0.97186580 --0.001260416608 0.96982182 --0.001255208275 0.96774573 --0.001249999941 0.96564289 --0.001244791608 0.96351330 --0.001239583275 0.96137300 --0.001234374941 0.95921130 --0.001229166608 0.95703355 --0.001223958275 0.95483439 --0.001218749941 0.95261919 --0.001213541608 0.95037188 --0.001208333275 0.94809246 --0.001203124941 0.94579164 --0.001197916608 0.94345872 --0.001192708275 0.94110439 --0.001187499941 0.93873401 --0.001182291608 0.93634223 --0.001177083275 0.93393440 --0.001171874941 0.93151051 --0.001166666608 0.92906523 --0.001161458275 0.92658783 --0.001156249941 0.92408904 --0.001151041608 0.92156349 --0.001145833275 0.91901118 --0.001140624941 0.91643748 --0.001135416608 0.91384237 --0.001130208275 0.91123121 --0.001124999941 0.90859864 --0.001119791608 0.90595537 --0.001114583275 0.90328536 --0.001109374941 0.90058858 --0.001104166608 0.89787041 --0.001098958275 0.89513083 --0.001093749941 0.89236449 --0.001088541608 0.88957141 --0.001083333275 0.88676762 --0.001078124941 0.88393708 --0.001072916608 0.88109583 --0.001067708275 0.87823319 --0.001062499941 0.87534914 --0.001057291608 0.87244903 --0.001052083275 0.86951683 
[... removed lines omitted: a long run of hard-coded time/amplitude sample pairs (embedded waveform test data) ...]
-"""
-
-def cloudUpload(inFilePath, cloudKey):
-    cloudBucket = os.environ["STORAGE_BUCKET"]
-    cloudDir = os.environ["STORAGE_BASE_KEY_PATH"]
-    queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/")
-    retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"])
-    print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode))
-
-# downloads the specified key from bucket and writes it to outfile
-def cloudDownload(cloudKey, outFilePath):
-    cloudBucket = os.environ["STORAGE_BUCKET"]
-    cloudDir = os.environ["STORAGE_BASE_KEY_PATH"]
-    queryPath = 
(cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) - - -# Write our control files -with open("galeisbs.con", "w") as f: - f.write(controlFileString) -with open("VTEMPlusPulseSouthernThomson.stm", "w") as f: - f.write(VTEMPlusPulseSouthernThomsonFileString) -with open("VTEMPlusPulseSouthernThomson.cfm", "w") as f: - f.write(VTEMPlusPulseSouthernThomsonCFMFileString) -cloudUpload("galeisbs.con", "galeisbs.con") -cloudUpload("VTEMPlusPulseSouthernThomson.stm", "VTEMPlusPulseSouthernThomson.stm") -cloudUpload("VTEMPlusPulseSouthernThomson.cfm", "VTEMPlusPulseSouthernThomson.cfm") - -# Read the WFS Input data into a CSV format -tree = ET.parse("${wfs-input-xml}"); -root = tree.getroot(); -csvArray=[]; -for featureMembers in root: - for aemsurveys in featureMembers: - row = [] - for field in aemsurveys: - #Non simple properties are ignored - if len(field) > 0: - continue - row.append(field.text) - csvArray.append(row) -with open("aemInput.dat",'w') as f: - writer = csv.writer(f, delimiter=' ', lineterminator='\n') - for row in csvArray: - writer.writerow(row) -cloudUpload("aemInput.dat", "aemInput.dat") - -# Execute AEM Process via MPI -subprocess.call(["mpirun", "-n", "${n-threads}", "/usr/bin/gaaem/galeisbstdem.exe", "galeisbs.con"]) - -# Upload results -inversionFiles = glob.glob('inversion.output.*') -print 'About to upload the following files:' -print inversionFiles -for fn in inversionFiles: - cloudUpload(fn, fn) - -# Concatenate output files for easier parsing -ascFiles = sorted(glob.glob('inversion.output.*.asc')) -with open('inversion.output.asc.combined', 'w') as outfile: - for fname in ascFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.asc.combined', 'inversion.output.asc.combined') -logFiles = sorted(glob.glob('inversion.output.*.log')) -with open('inversion.output.log.combined', 'w') as outfile: - for fname in logFiles: - with open(fname) as infile: - for line in infile: - outfile.write(line) -cloudUpload('inversion.output.log.combined', 'inversion.output.log.combined') diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-gravity-point.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-gravity-point.py deleted file mode 100644 index 59a771a77..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-gravity-point.py +++ /dev/null @@ -1,210 +0,0 @@ -#!/usr/bin/python2.6 - -############################################################################## -# -# Copyright (c) 2009-2013 by University of Queensland -# http://www.uq.edu.au -# -# Primary Business: Queensland, Australia -# Licensed under the Open Software License version 3.0 -# http://www.opensource.org/licenses/osl-3.0.php -# -# Development until 2012 by Earth Systems Science Computational Center (ESSCC) -# Development since 2012 by School of Earth Sciences -# -############################################################################## - -####### Start of data preparation ######### - - -import csv -import xml.etree.ElementTree as ET -import sys -import subprocess -import os - -N_THREADS = ${n-threads} -DATAFILE = '${inversion-file}' -XSIZE = ${xsize} -YSIZE = ${ysize} - -try: - from esys.downunder import * - from esys.escript import unitsSI as U - from 
esys.weipa import saveSilo -except ImportError: - line=["/opt/escript/bin/run-escript","-t" + str(N_THREADS)]+sys.argv - ret=subprocess.call(line) - sys.exit(ret) - - -# File name for pre process input file - - - -class Vgl(file): - - def __init__(self, file): - self.header = ['lat','long','elevation']; - self.run(file); - - - def run(self,file): - dics = self.getXMLDict(file); - self.writeToCSV(dics,"dem.csv"); - self.writeVRT("dem.vrt"); - self.convertToGridWithGDAL(); - - def writeToCSV(self,dictionaryData,filename): - with open(filename,'w') as f: - writer = csv.DictWriter(f,fieldnames=self.header); - #python2.7 only- writer.writeheader(); - writer.writerow(dict((fn,fn) for fn in writer.fieldnames)); - for d in dictionaryData: - writer.writerow(d); - - def writeVRT(self,filename): - with open(filename,'w') as f: - f.write("<OGRVRTDataSource>\n"); - f.write(" <OGRVRTLayer name=\"dem\">\n"); - f.write("  <SrcDataSource>dem.csv</SrcDataSource>\n"); - f.write("  <LayerSRS>EPSG:4283</LayerSRS>\n"); - f.write("  <GeometryField encoding=\"PointFromColumns\" x=\"long\" y=\"lat\" z=\"elevation\"/>\n"); - f.write(" </OGRVRTLayer>\n"); - f.write("</OGRVRTDataSource>\n"); - - - - def getXMLDict(self,filename): - tree = ET.parse(filename); - root = tree.getroot(); - csvArray=[]; - self.latMin=90.00; - self.latMax=-90.00; - self.longMin=180.00; - self.longMax=-180.00; - lat = 1; - long = 0; - - for featureMembers in root: - for gravitypoints in featureMembers: - dict={}; - dict['elevation'] = gravitypoints.find('{http://ga.gov.au}spherical_cap_bouguer_anomaly').text; - points = (gravitypoints.find('{http://www.opengis.net/gml}location/{http://www.opengis.net/gml}Point/{http://www.opengis.net/gml}pos').text).split(); - #we will eventually need to add some smarts to determine lat/long long/lat - dict['lat'] = points[lat]; - dict['long']= points[long]; - if (float(points[long]) > self.longMax): - self.longMax=float(points[long]); - if (float(points[long]) < self.longMin): - self.longMin=float(points[long]); - if (float(points[lat]) > self.latMax): - self.latMax=float(points[lat]); - if (float(points[lat]) < self.latMin): - self.latMin=float(points[lat]); - csvArray.append(dict); - - self.srs=(root[0][0].find('{http://www.opengis.net/gml}location/{http://www.opengis.net/gml}Point')).get('srsName'); - self.srs='EPSG:' + self.srs[-4:]; - return csvArray; - - - def convertToGridWithGDAL(self): - print self.srs; - print "latMax:"+str(self.latMax); - print "latMin:"+str(self.latMin); - print "longMax:"+str(self.longMax); - print "longMin:"+str(self.longMin); - p = subprocess.call(["gdal_grid", "-zfield", "elevation","-a_srs",self.srs, "-a", "invdist:power=2.0:smoothing=1.0", "-txe", str(self.longMin), str(self.longMax), "-tye", str(self.latMin), str(self.latMax), "-outsize", str(XSIZE), str(YSIZE), "-of", "netCDF", "-ot", "Float64", "-l", "dem", "dem.vrt", "dem.nc", "--config", "GDAL_NUM_THREADS", "ALL_CPUS"]); - subprocess.call(["cloud", "upload", "dem.nc", "dem.nc", "--set-acl=public-read"]); - subprocess.call(["cloud", "upload", "dem.csv", "dem.csv", "--set-acl=public-read"]); - subprocess.call(["cloud", "upload", "dem.vrt", "dem.vrt", "--set-acl=public-read"]); - - - -if __name__ == '__main__': - Vgl(DATAFILE); - - -####### End of data preparation ######### - - - - -"""3D gravity inversion using netCDF data""" - -# Filename for post process input data -DATASET = "/root/dem.nc" -# maximum depth (in meters) -DEPTH = ${max-depth} -# buffer zone above data (in meters; 6-10km recommended) -AIR = ${air-buffer} -# number of mesh elements in vertical direction (~1 element per 2km recommended) -NE_Z = ${vertical-mesh-elements} -# amount of horizontal padding (this affects end result, about 20% recommended) -PAD_X = 
${x-padding} -PAD_Y = ${y-padding} -MU_GRAVITY = ${mu-gravity} - - - -####### Do not change anything below this line ####### - - - - - -def saveAndUpload(fn, **args): - saveSilo(fn, **args) - subprocess.call(["cloud", "upload", fn, fn, "--set-acl=public-read"]) - -def statusCallback(k, x, Jx, g_Jx, norm_dx): - print("Iteration %s complete. Error=%s" % (k, norm_dx)) - -print("Processing GDAL file now"); -DATA_UNITS = 1e-6 * U.m/(U.sec**2) -source=NetCdfData(DataSource.GRAVITY, DATASET, scale_factor=DATA_UNITS) -db=DomainBuilder() -db.addSource(source) -db.setVerticalExtents(depth=DEPTH, air_layer=AIR, num_cells=NE_Z) -db.setFractionalPadding(PAD_X, PAD_Y) -db.fixDensityBelow(depth=DEPTH) -inv=GravityInversion() -inv.setup(db) -inv.setSolverCallback(statusCallback) -inv.getCostFunction().setTradeOffFactorsModels(MU_GRAVITY) - - -g, chi = db.getGravitySurveys()[0] -density=inv.run() -saveAndUpload('result.silo', gravity_anomaly=g, gravity_weight=chi, density=density) -print("Results saved in result.silo") - - -# Visualise result.silo using VisIt -import visit -visit.LaunchNowin() -saveatts = visit.SaveWindowAttributes() -saveatts.fileName = 'result-visit.png' -saveatts.family = 0 -saveatts.width = 1024 -saveatts.height = 768 -saveatts.resConstraint = saveatts.NoConstraint -saveatts.outputToCurrentDirectory = 1 -visit.SetSaveWindowAttributes(saveatts) -visit.OpenDatabase('result.silo') -visit.AddPlot('Contour', 'density') -c=visit.ContourAttributes() -c.colorType=c.ColorByColorTable -c.colorTableName = "hot" -visit.SetPlotOptions(c) -visit.DrawPlots() -v=visit.GetView3D() -v.viewNormal=(-0.554924, 0.703901, 0.443377) -v.viewUp=(0.272066, -0.3501, 0.896331) -visit.SetView3D(v) -visit.SaveWindow() -subprocess.call(["cloud", "upload", "result-visit.png", "result-visit.png", "--set-acl=public-read"]) -visit.DeleteAllPlots() -visit.CloseDatabase('result.silo') - diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-gravity.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-gravity.py deleted file mode 100644 index 9f24e5b8e..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-gravity.py +++ /dev/null @@ -1,98 +0,0 @@ - -############################################################################## -# -# Copyright (c) 2009-2013 by University of Queensland -# http://www.uq.edu.au -# -# Primary Business: Queensland, Australia -# Licensed under the Open Software License version 3.0 -# http://www.opensource.org/licenses/osl-3.0.php -# -# Development until 2012 by Earth Systems Science Computational Center (ESSCC) -# Development since 2012 by School of Earth Sciences -# -############################################################################## - -"""3D gravity inversion using netCDF data""" - -# Filename for input data -DATASET = '${inversion-file}' -# maximum depth (in meters) -DEPTH = ${max-depth} -# buffer zone above data (in meters; 6-10km recommended) -AIR = ${air-buffer} -# number of mesh elements in vertical direction (~1 element per 2km recommended) -NE_Z = ${vertical-mesh-elements} -# amount of horizontal padding (this affects end result, about 20% recommended) -PAD_X = ${x-padding} -PAD_Y = ${y-padding} - -N_THREADS = ${n-threads} - -####### Do not change anything below this line ####### - -import os -import subprocess -import sys - -try: - from esys.downunder import * - from esys.escript import unitsSI as U - from esys.weipa import saveSilo -except ImportError: - 
line=["/opt/escript/bin/run-escript","-t" + str(N_THREADS)]+sys.argv - ret=subprocess.call(line) - sys.exit(ret) - -def saveAndUpload(fn, **args): - saveSilo(fn, **args) - subprocess.call(["cloud", "upload", fn, fn, "--set-acl=public-read"]) - -def statusCallback(k, x, Jx, g_Jx, norm_dx): - print("Iteration %s complete. Error=%s" % (k, norm_dx)) - - - -DATA_UNITS = 1e-6 * U.m/(U.sec**2) -source=NetCdfData(DataSource.GRAVITY, DATASET, scale_factor=DATA_UNITS) -db=DomainBuilder() -db.addSource(source) -db.setVerticalExtents(depth=DEPTH, air_layer=AIR, num_cells=NE_Z) -db.setFractionalPadding(PAD_X, PAD_Y) -db.fixDensityBelow(depth=DEPTH) -inv=GravityInversion() -inv.setup(db) -inv.setSolverCallback(statusCallback) -g, chi = db.getGravitySurveys()[0] -density=inv.run() -saveAndUpload('result.silo', gravity_anomaly=g, gravity_weight=chi, density=density) -print("Results saved in result.silo") - - -# Visualise result.silo using VisIt -import visit -visit.LaunchNowin() -saveatts = visit.SaveWindowAttributes() -saveatts.fileName = 'result-visit.png' -saveatts.family = 0 -saveatts.width = 1024 -saveatts.height = 768 -saveatts.resConstraint = saveatts.NoConstraint -saveatts.outputToCurrentDirectory = 1 -visit.SetSaveWindowAttributes(saveatts) -visit.OpenDatabase('result.silo') -visit.AddPlot('Contour', 'density') -c=visit.ContourAttributes() -c.colorType=c.ColorByColorTable -c.colorTableName = "hot" -visit.SetPlotOptions(c) -visit.DrawPlots() -v=visit.GetView3D() -v.viewNormal=(-0.554924, 0.703901, 0.443377) -v.viewUp=(0.272066, -0.3501, 0.896331) -visit.SetView3D(v) -visit.SaveWindow() -subprocess.call(["cloud", "upload", "result-visit.png", "result-visit.png", "--set-acl=public-read"]) -visit.DeleteAllPlots() -visit.CloseDatabase('result.silo') - diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-joint.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-joint.py deleted file mode 100644 index 8610a0142..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-joint.py +++ /dev/null @@ -1,125 +0,0 @@ - -############################################################################## -# -# Copyright (c) 2009-2013 by University of Queensland -# http://www.uq.edu.au -# -# Primary Business: Queensland, Australia -# Licensed under the Open Software License version 3.0 -# http://www.opensource.org/licenses/osl-3.0.php -# -# Development until 2012 by Earth Systems Science Computational Center (ESSCC) -# Development since 2012 by School of Earth Sciences -# -############################################################################## - -"""3D gravity/magnetic joint inversion using netCDF data""" - -# Set parameters -MAGNETIC_DATASET = '${magnetic-file}' -GRAVITY_DATASET = '${gravity-file}' -# background magnetic flux density (B_north, B_east, B_vertical) in nano Tesla. 
-B_b = [${bb-north}, ${bb-east}, ${bb-vertical}] -# amount of horizontal padding (this affects end result, about 20% recommended) -PAD_X = ${x-padding} -PAD_Y = ${y-padding} -# maximum depth (in meters) -DEPTH = ${max-depth} -# buffer zone above data (in meters; 6-10km recommended) -AIR = ${air-buffer} -# number of mesh elements in vertical direction (~1 element per 2km recommended) -NE_Z = ${vertical-mesh-elements} -# trade-off factors -mu_gravity = ${mu-gravity} -mu_magnetic = ${mu-magnetic} - -N_THREADS = ${n-threads} - -####### Do not change anything below this line ####### - -import os -import subprocess -import sys - -try: - from esys.downunder import * - from esys.escript import unitsSI as U - from esys.weipa import * - -except ImportError: - line=["/opt/escript/bin/run-escript","-t" + str(N_THREADS)]+sys.argv - ret=subprocess.call(line) - sys.exit(ret) - -def saveAndUpload(fn, **args): - saveSilo(fn, **args) - subprocess.call(["cloud", "upload", fn, fn, "--set-acl=public-read"]) - -def statusCallback(k, x, Jx, g_Jx, norm_dx): - print("Iteration %s complete. Error=%s" % (k, norm_dx)) - -B_b=[b*U.Nano*U.Tesla for b in B_b] -MAG_UNITS = U.Nano * U.Tesla -GRAV_UNITS = 1e-6 * U.m/(U.sec**2) - -# Setup and run the inversion -grav_source=NetCdfData(NetCdfData.GRAVITY, GRAVITY_DATASET, scale_factor=GRAV_UNITS) -mag_source=NetCdfData(NetCdfData.MAGNETIC, MAGNETIC_DATASET, scale_factor=MAG_UNITS) -db=DomainBuilder(dim=3) -db.addSource(grav_source) -db.addSource(mag_source) -db.setVerticalExtents(depth=DEPTH, air_layer=AIR, num_cells=NE_Z) -db.setFractionalPadding(pad_x=PAD_X, pad_y=PAD_Y) -db.setBackgroundMagneticFluxDensity(B_b) -db.fixDensityBelow(depth=DEPTH) -db.fixSusceptibilityBelow(depth=DEPTH) - -inv=JointGravityMagneticInversion() -inv.setup(db) -inv.setSolverCallback(statusCallback) -inv.getCostFunction().setTradeOffFactorsModels([mu_gravity, mu_magnetic]) -inv.getCostFunction().setTradeOffFactorsRegularization(mu = [1.,1.], mu_c=1.) 
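# As far as the esys.downunder cost function is concerned, the trade-off factors configured above
# appear to act on different terms: setTradeOffFactorsModels([mu_gravity, mu_magnetic]) weights the
# gravity and magnetic data-misfit terms, while setTradeOffFactorsRegularization(mu=[1.,1.], mu_c=1.)
# weights the smoothness regularisation of the density and susceptibility models and, via mu_c, the
# cross-gradient term coupling them, so the minimised functional is roughly
#   J ~ mu_gravity*J_grav + mu_magnetic*J_mag + J_reg(density) + J_reg(susceptibility) + mu_c*J_cross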
- -density, susceptibility = inv.run() -print("density = %s"%density) -print("susceptibility = %s"%susceptibility) - -g, wg = db.getGravitySurveys()[0] -B, wB = db.getMagneticSurveys()[0] -saveAndUpload("result.silo", density=density, gravity_anomaly=g, gravity_weight=wg, susceptibility=susceptibility, magnetic_anomaly=B, magnetic_weight=wB) -print("Results saved in result.silo") - - -# Visualise result.silo using VisIt -import visit -visit.LaunchNowin() -saveatts = visit.SaveWindowAttributes() -saveatts.family = 0 -saveatts.width = 1024 -saveatts.height = 768 -saveatts.resConstraint = saveatts.NoConstraint -saveatts.outputToCurrentDirectory = 1 -saveatts.fileName = 'result-susceptibility.png' -visit.SetSaveWindowAttributes(saveatts) -visit.OpenDatabase('result.silo') -visit.AddPlot('Contour', 'susceptibility') -c=visit.ContourAttributes() -c.colorType=c.ColorByColorTable -c.colorTableName = "hot" -visit.SetPlotOptions(c) -visit.DrawPlots() -visit.SaveWindow() # save susceptibility image -visit.ChangeActivePlotsVar('density') -saveatts.fileName = 'result-density.png' -visit.SetSaveWindowAttributes(saveatts) -v=visit.GetView3D() -v.viewNormal=(-0.554924, 0.703901, 0.443377) -v.viewUp=(0.272066, -0.3501, 0.896331) -visit.SetView3D(v) -visit.SaveWindow() # save density image -visit.DeleteAllPlots() -visit.CloseDatabase('result.silo') - -subprocess.call(["cloud", "upload", "result-density.png", "result-density.png", "--set-acl=public-read"]) -subprocess.call(["cloud", "upload", "result-susceptibility.png", "result-susceptibility.png", "--set-acl=public-read"]) - diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-magnetic.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-magnetic.py deleted file mode 100644 index fd0d152f5..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/escript-magnetic.py +++ /dev/null @@ -1,101 +0,0 @@ - -############################################################################## -# -# Copyright (c) 2009-2013 by University of Queensland -# http://www.uq.edu.au -# -# Primary Business: Queensland, Australia -# Licensed under the Open Software License version 3.0 -# http://www.opensource.org/licenses/osl-3.0.php -# -# Development until 2012 by Earth Systems Science Computational Center (ESSCC) -# Development since 2012 by School of Earth Sciences -# -############################################################################## - -"""3D magnetic inversion example using netCDF data""" - -# Filename for input data -DATASET='${inversion-file}' -# background magnetic flux density (B_north, B_east, B_vertical) in nano Tesla. 
-B_b = [${bb-north}, ${bb-east}, ${bb-vertical}] -# maximum depth (in meters) -DEPTH = ${max-depth} -# buffer zone above data (in meters; 6-10km recommended) -AIR = ${air-buffer} -# number of mesh elements in vertical direction (~1 element per 2km recommended) -NE_Z = ${vertical-mesh-elements} -# amount of horizontal padding (this affects end result, about 20% recommended) -PAD_X = ${x-padding} -PAD_Y = ${y-padding} - -N_THREADS = ${n-threads} - -####### Do not change anything below this line ####### - -import os -import subprocess -import sys - -try: - from esys.downunder import * - from esys.escript import unitsSI as U - from esys.weipa import saveSilo -except ImportError: - line=["/opt/escript/bin/run-escript","-t" + str(N_THREADS)]+sys.argv - ret=subprocess.call(line) - sys.exit(ret) - -def saveAndUpload(fn, **args): - saveSilo(fn, **args) - subprocess.call(["cloud", "upload", fn, fn, "--set-acl=public-read"]) - -def statusCallback(k, x, Jx, g_Jx, norm_dx): - print("Iteration %s complete. Error=%s" % (k, norm_dx)) - -#Convert entered nano Tesla to Tesla -B_b=[b*U.Nano*U.Tesla for b in B_b] -DATA_UNITS = U.Nano * U.Tesla -source=NetCdfData(DataSource.MAGNETIC, DATASET, scale_factor=DATA_UNITS) -db=DomainBuilder() -db.addSource(source) -db.setVerticalExtents(depth=DEPTH, air_layer=AIR, num_cells=NE_Z) -db.setFractionalPadding(PAD_X, PAD_Y) -db.setBackgroundMagneticFluxDensity(B_b) -db.fixSusceptibilityBelow(depth=DEPTH) -inv=MagneticInversion() -inv.setup(db) -inv.setSolverCallback(statusCallback) -B, w = db.getMagneticSurveys()[0] -susceptibility=inv.run() -saveAndUpload('result.silo', magnetic_anomaly=B, magnetic_weight=w, susceptibility=susceptibility) -print("Results saved in result.silo") - - -# Visualise result.silo using VisIt -import visit -visit.LaunchNowin() -saveatts = visit.SaveWindowAttributes() -saveatts.fileName = 'result-visit.png' -saveatts.family = 0 -saveatts.width = 1024 -saveatts.height = 768 -saveatts.resConstraint = saveatts.NoConstraint -saveatts.outputToCurrentDirectory = 1 -visit.SetSaveWindowAttributes(saveatts) -visit.OpenDatabase('result.silo') -visit.AddPlot('Contour', 'susceptibility') -c=visit.ContourAttributes() -c.colorType=c.ColorByColorTable -c.colorTableName = "hot" -visit.SetPlotOptions(c) -visit.DrawPlots() -v=visit.GetView3D() -v.viewNormal=(-0.554924, 0.703901, 0.443377) -v.viewUp=(0.272066, -0.3501, 0.896331) -visit.SetView3D(v) -visit.SaveWindow() -subprocess.call(["cloud", "upload", "result-visit.png", "result-visit.png", "--set-acl=public-read"]) -visit.DeleteAllPlots() -visit.CloseDatabase('result.silo') - diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/example.txt b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/example.txt deleted file mode 100644 index f034047a9..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/example.txt +++ /dev/null @@ -1 +0,0 @@ -# This is an example template. Example Value = ${example-value}. diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/ubc-gravity.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/ubc-gravity.py deleted file mode 100644 index 734a16bb1..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/ubc-gravity.py +++ /dev/null @@ -1,735 +0,0 @@ -#!/usr/bin/env python - -# VEGL processing script. 
-# Please load the Job Object before you load other components - -import subprocess, csv, math, os, sys, urllib, glob; - -# Autogenerated Getter/Setter class -class VEGLBBox: - _srs = None - _maxNorthing = None - _minNorthing = None - _maxEasting = None - _minEasting = None - - def __init__(self, srs, maxNorthing, minNorthing, maxEasting, minEasting): - self._srs = srs - self._maxNorthing = maxNorthing - self._minNorthing = minNorthing - self._maxEasting = maxEasting - self._minEasting = minEasting - - def getSrs(self): - return self._srs - - def getMaxNorthing(self): - return self._maxNorthing - - def getMinNorthing(self): - return self._minNorthing - - def getMaxEasting(self): - return self._maxEasting - - def getMinEasting(self): - return self._minEasting - - - # Returns true if the specified northing/easting (assumed to be in the same SRS) - # lies within the spatial area represented by this bounding box. - def isPointInsideArea(self, northing, easting): - return ((easting >= self._minEasting) and (easting <= self._maxEasting) and (northing >= self._minNorthing) and (northing <= self._maxNorthing)) - -# Autogenerated Getter/Setter class -class VEGLParameters: - - _selectionMinEasting = None - _selectionMaxEasting = None - _selectionMinNorthing = None - _selectionMaxNorthing = None - _mgaZone = None - _cellX = None - _cellY = None - _cellZ = None - _inversionDepth = None - _inputCsvFile = None - - def __init__(self, inputCsvFile, selectionMinEasting, selectionMaxEasting, selectionMinNorthing, selectionMaxNorthing, mgaZone, cellX, cellY, cellZ, inversionDepth): - self._inputCsvFile = inputCsvFile - self._selectionMinEasting = selectionMinEasting - self._selectionMaxEasting = selectionMaxEasting - self._selectionMinNorthing = selectionMinNorthing - self._selectionMaxNorthing = selectionMaxNorthing - self._mgaZone = mgaZone - self._cellX = cellX - self._cellY = cellY - self._cellZ = cellZ - self._inversionDepth = inversionDepth - - def getInputCsvFile(self): - return self._inputCsvFile - - def getSelectionMinEasting(self): - return self._selectionMinEasting - - def getSelectionMaxEasting(self): - return self._selectionMaxEasting - - def getSelectionMinNorthing(self): - return self._selectionMinNorthing - - def getSelectionMaxNorthing(self): - return self._selectionMaxNorthing - - def getMgaZone(self): - return self._mgaZone - - def getCellX(self): - return self._cellX - - def getCellY(self): - return self._cellY - - def getCellZ(self): - return self._cellZ - - def getInversionDepth(self): - return self._inversionDepth - - # Gets an instance of VEGLBBox representing the padded bounds - def getSelectedBounds(self): - return VEGLBBox(srs=self._mgaZone, maxNorthing=self._selectionMaxNorthing, maxEasting=self._selectionMaxEasting, minNorthing=self._selectionMinNorthing, minEasting=self._selectionMinEasting) - -# Global parameter instance for reference -VEGLParams = VEGLParameters(inputCsvFile='${job-input-file}', selectionMinEasting=${job-selection-mineast}, selectionMaxEasting=${job-selection-maxeast}, selectionMinNorthing=${job-selection-minnorth}, selectionMaxNorthing=${job-selection-maxnorth}, mgaZone='${job-mgazone}', cellX=${job-cellx}, cellY=${job-celly}, cellZ=${job-cellz}, inversionDepth=${job-inversiondepth}) - -# ----- Autogenerated AWS Utility Functions ----- -# Uploads inFilePath to the specified bucket with the specified key -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = 
(cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) -# ----------------------------------------------- - -#------------------------------------------------------------------------------ -# supporting methods (and dragons, too) go here -#------------------------------------------------------------------------------ -# GLOBAL VARIABLES, USED FOR PROJECTION STUFF -east = 0.0 -north = 0.0 -#too lazy to properly refactor code used for UTM stuff -pi = math.pi -#these are based on WGS84 -#TODO modify for GDA94's spheroid (GRS80) -sm_a = 6378137.0 -#sm_b different for GDA94 - 6356752.314140 -sm_b = 6356752.314 -#first eccentric squared different for GDA94 -#calculate as sm_EccSquared = 1 - (sm_b^2 / sm_a^2) -sm_EccSquared = 6.69437999013e-03 -#Scale factor for UTM coordinates -UTMScaleFactor = 0.9996 - -# PROJECT -# This method does a nice projection from a latitude and longitude -# to an easting and northing wtihin a specified MGA zone. -# Based on http://home.hiwaay.net/~taylorc/toolbox/geography/geoutm.html -# -# Could be replaced with the Python hook to the Proj/4 stuff: -# http://code.google.com/p/pyproj/ -# or Python GDAL bindings -# http://pypi.python.org/pypi/GDAL/ -# given I'm not sure what the portal of doom has on it, I might as well -# write some stuff so it Just Works (trademark, Steve Jobs) -# This'll do for now. -def project(lat, lon, zone): - east = 0.0 - north = 0.0 - east,north = LatLonToUTMXY(lat, lon, int(zone)) - return east,north - -# Vestigial stuff from the projection work I borrowed. -# Will remove one day. -# -# DegToRad -def DegToRad(deg): - return (deg / 180.0 * pi) - -# RadToDeg -def RadToDeg(rad): - return (rad / pi * 180.0) - -# ArcLengthOfMeridian -# Computes the ellipsoidal distance from the equator to a point at a -# given latitude. -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# phi - Latitude of the point, in radians. -# Globals: -# sm_a - Ellipsoid model major axis. -# sm_b - Ellipsoid model minor axis. -# Returns: -# The ellipsoidal distance of the point from the equator, in meters. 
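# (The alpha/beta/gamma/delta/epsilon coefficients below are the usual truncated series for the
# meridian arc length, expanded in the third flattening n = (sm_a - sm_b) / (sm_a + sm_b).)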
-def ArcLengthOfMeridian(phi): - # precalculate n - n = (sm_a - sm_b) / (sm_a + sm_b) - # Precalculate alpha - alpha = ((sm_a + sm_b) / 2.0) * (1.0 + (math.pow (n, 2.0) / 4.0) + (math.pow (n, 4.0) / 64.0)) - # Precalculate beta - beta = (-3.0 * n / 2.0) + (9.0 * math.pow (n, 3.0) / 16.0) + (-3.0 * math.pow (n, 5.0) / 32.0) - # Precalculate gamma - gamma = (15.0 * math.pow (n, 2.0) / 16.0) + (-15.0 * math.pow (n, 4.0) / 32.0) - # Precalculate delta - delta = (-35.0 * math.pow (n, 3.0) / 48.0) + (105.0 * math.pow (n, 5.0) / 256.0) - # Precalculate epsilon - epsilon = (315.0 * math.pow (n, 4.0) / 512.0) - # Now calculate the sum of the series and return - result = alpha * (phi + (beta * math.sin (2.0 * phi)) + (gamma * math.sin (4.0 * phi)) + (delta * math.sin (6.0 * phi)) + (epsilon * math.sin (8.0 * phi))) - return result - -# UTMCentralMeridian -# Determines the central meridian for the given UTM zone. -# Inputs: -# zone - An integer value designating the UTM zone, range [1,60]. -# Returns: -# The central meridian for the given UTM zone, in radians, or zero -# if the UTM zone parameter is outside the range [1,60]. -# Range of the central meridian is the radian equivalent of [-177,+177]. -def UTMCentralMeridian(zone): - cmeridian = math.radians(-183.0 + (zone * 6.0)) - return cmeridian - -# FootpointLatitude -# Computes the footpoint latitude for use in converting transverse -# Mercator coordinates to ellipsoidal coordinates -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# y - The UTM northing coordinate, in meters. -# Returns: -# The footpoint latitude, in radians. -def FootpointLatitude(y): - # Precalculate n (Eq. 10.18) - n = (sm_a - sm_b) / (sm_a + sm_b) - # Precalculate alpha_ (Eq. 10.22) - # (Same as alpha in Eq. 10.17) - alpha_ = ((sm_a + sm_b) / 2.0) * (1 + (math.pow (n, 2.0) / 4) + (math.pow (n, 4.0) / 64)) - # Precalculate y_ (Eq. 10.23) - y_ = y / alpha_ - # Precalculate beta_ (Eq. 10.22) - beta_ = (3.0 * n / 2.0) + (-27.0 * math.pow (n, 3.0) / 32.0) + (269.0 * math.pow (n, 5.0) / 512.0) - # Precalculate gamma_ (Eq. 10.22) - gamma_ = (21.0 * math.pow (n, 2.0) / 16.0) + (-55.0 * math.pow (n, 4.0) / 32.0) - # Precalculate delta_ (Eq. 10.22) - delta_ = (151.0 * math.pow (n, 3.0) / 96.0) + (-417.0 * math.pow (n, 5.0) / 128.0) - # Precalculate epsilon_ (Eq. 10.22) - epsilon_ = (1097.0 * math.pow (n, 4.0) / 512.0) - # Now calculate the sum of the series (Eq. 10.21) - result = y_ + (beta_ * math.sin (2.0 * y_)) + (gamma_ * math.sin (4.0 * y_)) + (delta_ * math.sin (6.0 * y_)) + (epsilon_ * math.sin (8.0 * y_)) - return result - -# MapLatLonToXY -# Converts a latitude/longitude pair to x and y coordinates in the -# Transverse Mercator projection. Note that Transverse Mercator is not -# the same as UTM; a scale factor is required to convert between them. -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# phi - Latitude of the point, in radians. -# lambda - Longitude of the point, in radians. -# lambda0 - Longitude of the central meridian to be used, in radians. -# Returns: -# Two values, x and y: x and y coordinates of computed point, not scaled. 
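# ("Not scaled" means raw Transverse Mercator coordinates: the 0.9996 UTM scale factor and the
# false easting/northing offsets are only applied afterwards, in LatLonToUTMXY.)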
-def MapLatLonToXY(phi, lambda1, lambda0): - x = 0.0 - y = 0.0 - # Precalculate ep2 - ep2 = (math.pow(sm_a, 2.0) - math.pow(sm_b, 2.0)) / math.pow(sm_b, 2.0) - # Precalculate nu2 - nu2 = ep2 * math.pow(math.cos(phi), 2.0) - # Precalculate N - N = math.pow(sm_a, 2.0) / (sm_b * math.sqrt(1 + nu2)) - # Precalculate t - t = math.tan(phi) - t2 = t * t - tmp = (t2 * t2 * t2) - math.pow(t, 6.0) - # Precalculate l - l = lambda1 - lambda0 - # Precalculate coefficients for l**n in the equations below - # so a normal human being can read the expressions for easting - # and northing - # -- l**1 and l**2 have coefficients of 1.0 - l3coef = 1.0 - t2 + nu2 - l4coef = 5.0 - t2 + 9 * nu2 + 4.0 * (nu2 * nu2) - l5coef = 5.0 - 18.0 * t2 + (t2 * t2) + 14.0 * nu2 - 58.0 * t2 * nu2 - l6coef = 61.0 - 58.0 * t2 + (t2 * t2) + 270.0 * nu2 - 330.0 * t2 * nu2 - l7coef = 61.0 - 479.0 * t2 + 179.0 * (t2 * t2) - (t2 * t2 * t2) - l8coef = 1385.0 - 3111.0 * t2 + 543.0 * (t2 * t2) - (t2 * t2 * t2) - # Calculate easting (x) - x = N * math.cos (phi) * l + (N / 6.0 * math.pow (math.cos (phi), 3.0) * l3coef * math.pow (l, 3.0)) + (N / 120.0 * math.pow (math.cos (phi), 5.0) * l5coef * math.pow (l, 5.0)) + (N / 5040.0 * math.pow (math.cos (phi), 7.0) * l7coef * math.pow (l, 7.0)) - # Calculate northing (y) - y = ArcLengthOfMeridian (phi) + (t / 2.0 * N * math.pow (math.cos (phi), 2.0) * math.pow (l, 2.0)) + (t / 24.0 * N * math.pow (math.cos (phi), 4.0) * l4coef * math.pow (l, 4.0)) + (t / 720.0 * N * math.pow (math.cos (phi), 6.0) * l6coef * math.pow (l, 6.0)) + (t / 40320.0 * N * math.pow (math.cos (phi), 8.0) * l8coef * math.pow (l, 8.0)) - return x,y - -# MapXYToLatLon -# TODO: Function not fixed for Python-ness 28/06/2011 -# Converts x and y coordinates in the Transverse Mercator projection to -# a latitude/longitude pair. Note that Transverse Mercator is not -# the same as UTM; a scale factor is required to convert between them. -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# x - The easting of the point, in meters. -# y - The northing of the point, in meters. -# lambda0 - Longitude of the central meridian to be used, in radians. -# Outputs: -# philambda - A 2-element containing the latitude and longitude -# in radians. -# Returns: -# The function does not return a value. -# Remarks: -# The local variables Nf, nuf2, tf, and tf2 serve the same purpose as -# N, nu2, t, and t2 in MapLatLonToXY, but they are computed with respect -# to the footpoint latitude phif. -# -# x1frac, x2frac, x2poly, x3poly, etc. are to enhance readability and -# to optimize computations. -def MapXYToLatLon(x, y, lambda0, philambda): - # Get the value of phif, the footpoint latitude. - phif = FootpointLatitude (y) - # Precalculate ep2 - ep2 = (math.pow (sm_a, 2.0) - math.pow (sm_b, 2.0)) / math.pow (sm_b, 2.0) - # Precalculate cos (phif) - cf = math.cos (phif) - # Precalculate nuf2 - nuf2 = ep2 * math.pow (cf, 2.0) - # Precalculate Nf and initialize Nfpow - Nf = math.pow (sm_a, 2.0) / (sm_b * math.sqrt (1 + nuf2)) - Nfpow = Nf - # Precalculate tf - tf = math.tan (phif) - tf2 = tf * tf - tf4 = tf2 * tf2 - # Precalculate fractional coefficients for x**n in the equations - # below to simplify the expressions for latitude and longitude. 
- x1frac = 1.0 / (Nfpow * cf) - Nfpow *= Nf # now equals Nf**2 - x2frac = tf / (2.0 * Nfpow) - Nfpow *= Nf # now equals Nf**3 - x3frac = 1.0 / (6.0 * Nfpow * cf) - Nfpow *= Nf # now equals Nf**4 - x4frac = tf / (24.0 * Nfpow) - Nfpow *= Nf # now equals Nf**5 - x5frac = 1.0 / (120.0 * Nfpow * cf) - Nfpow *= Nf # now equals Nf**6 - x6frac = tf / (720.0 * Nfpow) - Nfpow *= Nf # now equals Nf**7 - x7frac = 1.0 / (5040.0 * Nfpow * cf) - Nfpow *= Nf # now equals Nf**8 - x8frac = tf / (40320.0 * Nfpow) - # Precalculate polynomial coefficients for x**n. - # -- x**1 does not have a polynomial coefficient. - x2poly = -1.0 - nuf2 - x3poly = -1.0 - 2 * tf2 - nuf2 - x4poly = 5.0 + 3.0 * tf2 + 6.0 * nuf2 - 6.0 * tf2 * nuf2 - 3.0 * (nuf2 *nuf2) - 9.0 * tf2 * (nuf2 * nuf2) - x5poly = 5.0 + 28.0 * tf2 + 24.0 * tf4 + 6.0 * nuf2 + 8.0 * tf2 * nuf2 - x6poly = -61.0 - 90.0 * tf2 - 45.0 * tf4 - 107.0 * nuf2 + 162.0 * tf2 * nuf2 - x7poly = -61.0 - 662.0 * tf2 - 1320.0 * tf4 - 720.0 * (tf4 * tf2) - x8poly = 1385.0 + 3633.0 * tf2 + 4095.0 * tf4 + 1575 * (tf4 * tf2) - # Calculate latitude - philambda[0] = phif + x2frac * x2poly * (x * x) + x4frac * x4poly * math.pow (x, 4.0) + x6frac * x6poly * math.pow (x, 6.0) + x8frac * x8poly * math.pow (x, 8.0) - # Calculate longitude - philambda[1] = lambda0 + x1frac * x + x3frac * x3poly * math.pow (x, 3.0) + x5frac * x5poly * math.pow (x, 5.0) + x7frac * x7poly * math.pow (x, 7.0) - return - -# LatLonToUTMXY -# Converts a latitude/longitude pair to x and y coordinates in the -# Universal Transverse Mercator projection. -# Inputs: -# lat - Latitude of the point, in degrees. -# lon - Longitude of the point, in degrees. -# zone - UTM zone to be used for calculating values for x and y. -# If zone is less than 1 or greater than 60, the routine -# will determine the appropriate zone from the value of lon. -# Outputs: -# xy - A 2-element array where the UTM x and y values will be stored. -# Returns: -# The UTM zone used for calculating the values of x and y. -def LatLonToUTMXY(lat, lon, zone): - east,north = MapLatLonToXY(math.radians(lat), math.radians(lon), UTMCentralMeridian (zone)) - # Adjust easting and northing for UTM system. - # magic number on the easting (500000) is the false easting - east = east * UTMScaleFactor + 500000.0 - north = north * UTMScaleFactor - # this is used to add the false northing for southern hemisphere values - if (north < 0.0): - north = north + 10000000.0 - return east,north - -# UTMXYToLatLon -# Converts x and y coordinates in the Universal Transverse Mercator -# projection to a latitude/longitude pair. -# Inputs: -# x - The easting of the point, in meters. -# y - The northing of the point, in meters. -# zone - The UTM zone in which the point lies. -# southhemi - True if the point is in the southern hemisphere; -# false otherwise. -# Outputs: -# latlon - A 2-element array containing the latitude and -# longitude of the point, in radians. -# Returns: -# The function does not return a value. -def UTMXYToLatLon(x, y, zone, southhemi, latlon): - x -= 500000.0 - x /= UTMScaleFactor - # If in southern hemisphere, adjust y accordingly. 
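# (Southern-hemisphere UTM/MGA northings carry a 10,000,000 m false northing; it is removed here
# before the 0.9996 scale factor is undone.)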
- if (southhemi): - y -= 10000000.0 - y /= UTMScaleFactor - cmeridian = UTMCentralMeridian (zone) - MapXYToLatLon (x, y, cmeridian, latlon) - return - -#------------------------------------------------------------------------------ -# Methods other than function projection stuff down here -#------------------------------------------------------------------------------ -# GET_MAG_FIELD -# This is a method which gets the magnetic field things we need -# Needs a latitude and longitude and an 'epoch' - time we want the mag field for -# A few URLs can be used to get this -# http://www.ngdc.noaa.gov/geomag/magfield.shtml -# http://www.ga.gov.au/oracle/geomag/agrfform.jsp -# example of the GA one, using AGRF -# http://www.ga.gov.au/bin/geoAGRF?latd=-24&latm=00&lats=00&lond=135&lonm=00&lons=00&elev=0&year=2010&month=01&day=1&Ein=D -# -#TODO: Make it a bit more awesome -def get_mag_field(lat, lon, year, month, day): - #some defaults so it doesn't fall over - declination = 0.0 - inclination = 0.0 - intensity = 50000.0 - #for AGRF call we need decimal degrees turned into lats and lons - latd,latm,lats = decdeg2dms(lat) - lond,lonm,lons = decdeg2dms(lon) - #assume zero elevation - elev = 0 - #successive formatting of URL to make it a bit easier to read - #urlencode doesn't work very well for some reason, but this handcoded way does - #base URL for AGRF online calculation as of 1 July 2011 - base_url = 'http://www.ga.gov.au/bin/geoAGRF?' - #latitude stuff - full_url = base_url + 'latd=' + str(latd) + '&latm=' + str(latm) + '&lats=' + str(lats) - #longitude stuff - full_url = full_url + '&lond=' + str(lond) + '&lonm=' + str(lonm) + '&lons=' + str(lons) - #elevation stuff - full_url = full_url + '&elev=' + str(elev) - #epoch stuff - full_url = full_url + '&year=' + str(year) + '&month=' + str(month) + '&day=' + str(day) - #We want three components - D is declination, I is inclination, F is total field strength - full_url = full_url + '&Ein=D&Ein=I&Ein=F' - - #debugging: what URL are we retrieving? - print 'Retrieving the following URL: ' + full_url - #open the URL, read its full contents into a variable - f = urllib.urlopen(full_url) - agrf_page_contents = f.read() - - #now we need to extract the small section of the page we're looking for - #As of 1 July 2011, it is bounced by
<b> Magnetic Field Components </b> and a newline <br> - #Find the start string, and 41 characters to this position index to strip out - #the Magnetic Field Components sentence & formatting characteristics - start_index = agrf_page_contents.find('<b> Magnetic Field Components </b>') + 41 - #End index is easier to define - end_index = agrf_page_contents.find('\n<br>') - #Extract the text between the two indices we defined above - components_contents = agrf_page_contents[start_index:end_index] - #Now we can split them with a newline and <br> delimiter - #Will provide 3 'component' strings - for component in components_contents.split('\n<br>
'): - #Check the first character and remove the leading characters - #and convert the extracted text to a float - #D means declination... - if component[0:1] == 'D': - declination = float(component[4:-3]) - #I means inclination... - if component[0:1] == 'I': - inclination = float(component[4:-3]) - #and F means total field intensity - if component[0:1] == 'F': - intensity = float(component[4:-3]) - return declination,inclination,intensity - -# DECDEG2DMS -# Converts a decimal degree number into degrees, minutes and seconds. -def decdeg2dms(dd): - mnt,sec = divmod(dd*3600,60) - deg,mnt = divmod(mnt,60) - return deg,mnt,sec - -#------------------------------------------------------------------------------ -# Methods other than function projection stuff down here -#------------------------------------------------------------------------------ -# GET_MAG_FIELD_DATA -# This is a method which gets the magnetic field things we need -# Needs a latitude and longitude and an 'epoch' - time we want the mag field for -# A few URLs can be used to get this -# http://www.ngdc.noaa.gov/geomag/magfield.shtml -# http://www.ga.gov.au/oracle/geomag/agrfform.jsp -# example of the GA one, using AGRF -# http://www.ga.gov.au/bin/geoAGRF?latd=-24&latm=00&lats=00&lond=135&lonm=00&lons=00&elev=0&year=2010&month=01&day=1&Ein=D -# -#TODO: Make it a bit more awesome -def get_mag_field_data(lat, lon, year, month, day): - #some defaults so it doesn't fall over - declination = 0.0 - inclination = 0.0 - intensity = 50000.0 - #for AGRF call we need decimal degrees turned into lats and lons - latd,latm,lats = decdeg2dms(lat) - lond,lonm,lons = decdeg2dms(lon) - #assume zero elevation - elev = 0 - #successive formatting of URL to make it a bit easier to read - #urlencode doesn't work very well for some reason, but this handcoded way does - #base URL for AGRF online calculation as of 1 July 2011 - base_url = 'http://www.ga.gov.au/bin/geoAGRF?' - #latitude stuff - full_url = base_url + 'latd=' + str(latd) + '&latm=' + str(latm) + '&lats=' + str(lats) - #longitude stuff - full_url = full_url + '&lond=' + str(lond) + '&lonm=' + str(lonm) + '&lons=' + str(lons) - #elevation stuff - full_url = full_url + '&elev=' + str(elev) - #epoch stuff - full_url = full_url + '&year=' + str(year) + '&month=' + str(month) + '&day=' + str(day) - #We want three components - D is declination, I is inclination, F is total field strength - full_url = full_url + '&Ein=D&Ein=I&Ein=F' - - #debugging: what URL are we retrieving? - print 'Retrieving the following URL: ' + full_url - #open the URL, read its full contents into a variable - f = urllib.urlopen(full_url) - agrf_page_contents = f.read() - - #now we need to extract the small section of the page we're looking for - #As of 1 July 2011, it is bounced by
<b> Magnetic Field Components </b> and a newline <br> - #Find the start string, and 41 characters to this position index to strip out - #the Magnetic Field Components sentence & formatting characteristics - start_index = agrf_page_contents.find('<b> Magnetic Field Components </b>') + 41 - #End index is easier to define - end_index = agrf_page_contents.find('\n<br>') - #Extract the text between the two indices we defined above - components_contents = agrf_page_contents[start_index:end_index] - #Now we can split them with a newline and <br> delimiter - #Will provide 3 'component' strings - for component in components_contents.split('\n<br>
'): - #Check the first character and remove the leading characters - #and convert the extracted text to a float - #D means declination... - if component[0:1] == 'D': - declination = float(component[4:-3]) - #I means inclination... - if component[0:1] == 'I': - inclination = float(component[4:-3]) - #and F means total field intensity - if component[0:1] == 'F': - intensity = float(component[4:-3]) - return declination,inclination,intensity -# autogenerated function definition -def main(): - # ------------ VEGL - Step 1 --------- - f = file(VEGLParams.getInputCsvFile(), "r") - input_csv = csv.reader(f) - data = [] - lineCount = 0 # The first 2 lines contain text and must be skipped - for strX, strY, strZ in input_csv: - if lineCount > 1: - x = float(strX) - y = float(strY) - z = float(strZ) - data.append([x,y,z]) - lineCount = lineCount + 1 - - # ------------------------------------ - - - VEGLSelectedBox = VEGLParams.getSelectedBounds() - zone = int(VEGLSelectedBox.getSrs()) - temp_data = [] - for x, y, z in data: - newX, newY = project(x, y, zone) - temp_data.append([newX, newY, z]) - data = temp_data - - temp_data = [] - for x, y, z in data: - # isPointInsideArea happens to read northings then eastings, and we store - # northings as y, and eastings as - if VEGLSelectedBox.isPointInsideArea(y,x): - temp_data.append([x,y,z]) - data = temp_data - - # If we have a gravity inversion, we need to correct the units of the supplied gravity data. - # National gravity coverages are in units of micrometres per second squared. - # UBC-GIF gravity inversion expects milliGals, which means we divide supplied properties by 10. - # - i = 0 - for east,north,prop in data: - data[i] = east,north,prop/10 - i = i + 1 - - # UBC-GIF needs a data file in a specific format. - # We need to define a filename ('obs_filename'). - # This includes writing out expected errors in the data, number of data points etc. - print 'Time to write out a data file' - obs_file = 'temp_ubc_obs.asc' - f = file(obs_file, 'w') - f.write(str(len(data)) + '\t! Number of points\n') - # For each data point, write out: Easting, Northing, Elevation, Data, Error - # In this simple example, we assume elevation is 1 m, and error are 2 mGal / nT - for east,north,prop in data: - elevation = 1.0 - error = 2.0 - f.write(str(east) + ' ' + str(north) + ' ' + str(elevation) + ' ' + str(prop) + ' ' + str(error) + '\n') - f.close() - # Step 6: calculate some meshy stuff - # --- Scientific description below --- - # Defines the mesh parameters and writes out a UBC-GIF mesh file. - # Mesh is defined by the minimum and maximum eastings and northings, inversion depth, and respective cell sizes. 
- # Mesh file name: 'mesh' - minEasting = VEGLSelectedBox.getMinEasting() - maxEasting = VEGLSelectedBox.getMaxEasting() - minNorthing = VEGLSelectedBox.getMinNorthing() - maxNorthing = VEGLSelectedBox.getMaxNorthing() - invDepth = VEGLParams.getInversionDepth() - cell_x = VEGLParams.getCellX() - cell_y = VEGLParams.getCellY() - cell_z = VEGLParams.getCellZ() - num_x_cells = int((maxEasting - minEasting) / cell_x) - num_y_cells = int((maxNorthing - minNorthing) / cell_y) - num_z_cells = int(invDepth / cell_z) - print 'Number of cells in x dimension: ' + str(num_x_cells) + ', number of cells in y dimension: ' + str(num_y_cells) + ' and number of cells in z dimension: ' + str(num_z_cells) - # Define mesh file name here - mesh = 'mesh.msh' - try: - f = file(mesh, 'w') - f.write(str(num_x_cells) + ' ' + str(num_y_cells) + ' ' + str(num_z_cells) + '\n') - f.write(str(minEasting) + ' ' + str(minNorthing) + ' 0\n') - f.write(str(num_x_cells) + '*' + str(cell_x) + '\n') - f.write(str(num_y_cells) + '*' + str(cell_y) + '\n') - f.write(str(num_z_cells) + '*' + str(cell_z)) - f.close() - except IOError, e: - print e - sys.exit(1) - - # Step 7: Write out sensitivity analsysis control file - # --- Scientific description below --- - # There are two parts to running a UBC-GIF inversion. The first involves a sensitivity analysis; - # here we write out the appropriate control files for this analysis. - # File names for things defined outside this method are defined at the top - obs_file = 'temp_ubc_obs.asc' - mesh = 'mesh.msh' - # Sensitivity analysis (*sen3d_MPI) input file - sns_inp = 'sens.inp' - - # Write some files - try: - f = file(sns_inp, 'w') - f.write(mesh + ' ! mesh\n') - f.write(obs_file + ' ! observations file\n') - f.write('null ! topography\n') - f.write('2 ! iwt\n') - f.write('null ! beta, znot\n') - f.write('daub2 ! wavelet\n') - f.write('2 1e-4 ! itol eps\n') - f.close() - - except IOError, e: - print e - sys.exit(1) - - # Step 8: Write out inversion control file - # --- Scientific description below --- - # In the second part to running a UBC-GIF inversion, we need to write out - # the control file for the actual inversion. - # File names for things defined outside this method are defined at the top - obs_file = 'temp_ubc_obs.asc' - inv_inp = 'inv.inp' - try: - f = file(inv_inp, 'w') - f.write('0 !irest\n') - f.write('1 !mode\n') - f.write('1 0.02 !par tolc\n') - f.write(obs_file + ' ! observations file\n') - # file name dependant on type set in JS - f.write('gzinv3d.mtx\n') - f.write('null !initial model\n') - f.write('null !reference model\n') - f.write('null !active cell file\n') - f.write('null !lower, upper bounds\n') - f.write('null Le, Ln, Lz\n') - f.write('SMOOTH_MOD\n') - f.write('null !weighting file\n') - f.write('0\n') - f.close() - except IOError, e: - print e - sys.exit(1) - - # step 9: finalise stuff - I guess this is where we execute two commands - # At a guess, they are the two commented-out lines below? 
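# As the control files written above suggest, gzsen3d_MPI reads sens.inp and produces the
# sensitivity matrix (the gzinv3d.mtx named in inv.inp), and gzinv3d_MPI then reads inv.inp and
# runs the inversion proper, writing the *.den model and *.pre predicted-data files uploaded below.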
- # Control files, defined elsewhere - sns_inp = 'sens.inp' - sns_out = 'sens.out' - inv_inp = 'inv.inp' - inv_out = 'inv.out' - sensitivity_command = 'mpirun -np ${n-threads} --mca btl self,sm /opt/ubc/gzsen3d_MPI ' + sns_inp + ' > ' + sns_out - inversion_command = 'mpirun -np ${n-threads} --mca btl self,sm /opt/ubc/gzinv3d_MPI ' + inv_inp + ' > ' + inv_out - print 'Sensitivity command: ' + sensitivity_command - print 'Inversion command: ' + inversion_command - sys.stdout.flush() - retcode = subprocess.call(sensitivity_command, shell=True) - print 'sensitivity returned: ' + str(retcode) - sys.stdout.flush() - retcode = subprocess.call(inversion_command, shell=True) - print 'inversion returned: ' + str(retcode) - sys.stdout.flush() - # Upload our logging outs - cloudUpload(sns_out, sns_out) - cloudUpload(inv_out, inv_out) - # Upload the mesh file - cloudUpload(mesh, mesh) - # Upload gravity or magnetic data file - denFiles = glob.glob('*zinv3d*.den') - preFiles = glob.glob('*zinv3d*.pre') - # Upload Final Model + Prediction - print 'Uploading final model and prediction' - invFilesToUpload = [] - if len(denFiles) > 0: - denFiles.sort() - invFilesToUpload.append(denFiles[len(denFiles) - 1]) - if len(preFiles) > 0: - preFiles.sort() - invFilesToUpload.append(preFiles[len(preFiles) - 1]) - print 'About to upload the following files:' - print invFilesToUpload - for invFile in invFilesToUpload: - cloudUpload(invFile, invFile) - -# autogenerated main definition -if __name__ == "__main__": - main() - diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/ubc-magnetic.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/ubc-magnetic.py deleted file mode 100644 index 5fbd6293c..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/ubc-magnetic.py +++ /dev/null @@ -1,733 +0,0 @@ -#!/usr/bin/env python - -# VEGL processing script. -# Please load the Job Object before you load other components - -import subprocess, csv, math, os, sys, urllib, glob; - -# Autogenerated Getter/Setter class -class VEGLBBox: - _srs = None - _maxNorthing = None - _minNorthing = None - _maxEasting = None - _minEasting = None - - def __init__(self, srs, maxNorthing, minNorthing, maxEasting, minEasting): - self._srs = srs - self._maxNorthing = maxNorthing - self._minNorthing = minNorthing - self._maxEasting = maxEasting - self._minEasting = minEasting - - def getSrs(self): - return self._srs - - def getMaxNorthing(self): - return self._maxNorthing - - def getMinNorthing(self): - return self._minNorthing - - def getMaxEasting(self): - return self._maxEasting - - def getMinEasting(self): - return self._minEasting - - - # Returns true if the specified northing/easting (assumed to be in the same SRS) - # lies within the spatial area represented by this bounding box. 
- def isPointInsideArea(self, northing, easting): - return ((easting >= self._minEasting) and (easting <= self._maxEasting) and (northing >= self._minNorthing) and (northing <= self._maxNorthing)) - -# Autogenerated Getter/Setter class -class VEGLParameters: - - _selectionMinEasting = None - _selectionMaxEasting = None - _selectionMinNorthing = None - _selectionMaxNorthing = None - _mgaZone = None - _cellX = None - _cellY = None - _cellZ = None - _inversionDepth = None - _inputCsvFile = None - - def __init__(self, inputCsvFile, selectionMinEasting, selectionMaxEasting, selectionMinNorthing, selectionMaxNorthing, mgaZone, cellX, cellY, cellZ, inversionDepth): - self._inputCsvFile = inputCsvFile - self._selectionMinEasting = selectionMinEasting - self._selectionMaxEasting = selectionMaxEasting - self._selectionMinNorthing = selectionMinNorthing - self._selectionMaxNorthing = selectionMaxNorthing - self._mgaZone = mgaZone - self._cellX = cellX - self._cellY = cellY - self._cellZ = cellZ - self._inversionDepth = inversionDepth - - def getInputCsvFile(self): - return self._inputCsvFile - - def getSelectionMinEasting(self): - return self._selectionMinEasting - - def getSelectionMaxEasting(self): - return self._selectionMaxEasting - - def getSelectionMinNorthing(self): - return self._selectionMinNorthing - - def getSelectionMaxNorthing(self): - return self._selectionMaxNorthing - - def getMgaZone(self): - return self._mgaZone - - def getCellX(self): - return self._cellX - - def getCellY(self): - return self._cellY - - def getCellZ(self): - return self._cellZ - - def getInversionDepth(self): - return self._inversionDepth - - # Gets an instance of VEGLBBox representing the padded bounds - def getSelectedBounds(self): - return VEGLBBox(srs=self._mgaZone, maxNorthing=self._selectionMaxNorthing, maxEasting=self._selectionMaxEasting, minNorthing=self._selectionMinNorthing, minEasting=self._selectionMinEasting) - -# Global parameter instance for reference -VEGLParams = VEGLParameters(inputCsvFile='${job-input-file}', selectionMinEasting=${job-selection-mineast}, selectionMaxEasting=${job-selection-maxeast}, selectionMinNorthing=${job-selection-minnorth}, selectionMaxNorthing=${job-selection-maxnorth}, mgaZone='${job-mgazone}', cellX=${job-cellx}, cellY=${job-celly}, cellZ=${job-cellz}, inversionDepth=${job-inversiondepth}) - -# ----- Autogenerated AWS Utility Functions ----- -# Uploads inFilePath to the specified bucket with the specified key -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print ("cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode)) - -# downloads the specified key from bucket and writes it to outfile -def cloudDownload(cloudKey, outFilePath): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "download",cloudBucket,cloudDir,cloudKey, outFilePath]) - print "cloudDownload: " + queryPath + " to " + outFilePath + " returned " + str(retcode) -# ----------------------------------------------- - -#------------------------------------------------------------------------------ -# supporting methods (and dragons, too) go here 
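Two illustrative notes on the helpers above and below; every concrete value named here is invented for the example. First, with STORAGE_BUCKET=my-bucket and STORAGE_BASE_KEY_PATH=jobs/42 in the environment, cloudUpload('inv.out', 'inv.out') shells out to the image's "cloud" CLI as: cloud upload inv.out inv.out --set-acl=public-read, and on success prints: cloudUpload: inv.out to my-bucket/jobs/42/inv.out returned 0. Second, the projection helpers that follow hand-roll the lat/lon to MGA conversion; their own comments mention pyproj as an alternative. A minimal sketch of the same conversion, assuming pyproj were available on the VM (the template does not assume this):

# Illustrative sketch only; the template below does NOT use pyproj.
from pyproj import Transformer

def project_with_pyproj(lat, lon, zone):
    # GDA94 geographic (EPSG:4283) to GDA94 / MGA zone N (EPSG:283NN, e.g. 28355 for zone 55)
    transformer = Transformer.from_crs("EPSG:4283", "EPSG:283%02d" % int(zone), always_xy=True)
    east, north = transformer.transform(lon, lat)
    return east, north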
-#------------------------------------------------------------------------------ -# GLOBAL VARIABLES, USED FOR PROJECTION STUFF -east = 0.0 -north = 0.0 -#too lazy to properly refactor code used for UTM stuff -pi = math.pi -#these are based on WGS84 -#TODO modify for GDA94's spheroid (GRS80) -sm_a = 6378137.0 -#sm_b different for GDA94 - 6356752.314140 -sm_b = 6356752.314 -#first eccentric squared different for GDA94 -#calculate as sm_EccSquared = 1 - (sm_b^2 / sm_a^2) -sm_EccSquared = 6.69437999013e-03 -#Scale factor for UTM coordinates -UTMScaleFactor = 0.9996 - -# PROJECT -# This method does a nice projection from a latitude and longitude -# to an easting and northing wtihin a specified MGA zone. -# Based on http://home.hiwaay.net/~taylorc/toolbox/geography/geoutm.html -# -# Could be replaced with the Python hook to the Proj/4 stuff: -# http://code.google.com/p/pyproj/ -# or Python GDAL bindings -# http://pypi.python.org/pypi/GDAL/ -# given I'm not sure what the portal of doom has on it, I might as well -# write some stuff so it Just Works (trademark, Steve Jobs) -# This'll do for now. -def project(lat, lon, zone): - east = 0.0 - north = 0.0 - east,north = LatLonToUTMXY(lat, lon, int(zone)) - return east,north - -# Vestigial stuff from the projection work I borrowed. -# Will remove one day. -# -# DegToRad -def DegToRad(deg): - return (deg / 180.0 * pi) - -# RadToDeg -def RadToDeg(rad): - return (rad / pi * 180.0) - -# ArcLengthOfMeridian -# Computes the ellipsoidal distance from the equator to a point at a -# given latitude. -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# phi - Latitude of the point, in radians. -# Globals: -# sm_a - Ellipsoid model major axis. -# sm_b - Ellipsoid model minor axis. -# Returns: -# The ellipsoidal distance of the point from the equator, in meters. -def ArcLengthOfMeridian(phi): - # precalculate n - n = (sm_a - sm_b) / (sm_a + sm_b) - # Precalculate alpha - alpha = ((sm_a + sm_b) / 2.0) * (1.0 + (math.pow (n, 2.0) / 4.0) + (math.pow (n, 4.0) / 64.0)) - # Precalculate beta - beta = (-3.0 * n / 2.0) + (9.0 * math.pow (n, 3.0) / 16.0) + (-3.0 * math.pow (n, 5.0) / 32.0) - # Precalculate gamma - gamma = (15.0 * math.pow (n, 2.0) / 16.0) + (-15.0 * math.pow (n, 4.0) / 32.0) - # Precalculate delta - delta = (-35.0 * math.pow (n, 3.0) / 48.0) + (105.0 * math.pow (n, 5.0) / 256.0) - # Precalculate epsilon - epsilon = (315.0 * math.pow (n, 4.0) / 512.0) - # Now calculate the sum of the series and return - result = alpha * (phi + (beta * math.sin (2.0 * phi)) + (gamma * math.sin (4.0 * phi)) + (delta * math.sin (6.0 * phi)) + (epsilon * math.sin (8.0 * phi))) - return result - -# UTMCentralMeridian -# Determines the central meridian for the given UTM zone. -# Inputs: -# zone - An integer value designating the UTM zone, range [1,60]. -# Returns: -# The central meridian for the given UTM zone, in radians, or zero -# if the UTM zone parameter is outside the range [1,60]. -# Range of the central meridian is the radian equivalent of [-177,+177]. -def UTMCentralMeridian(zone): - cmeridian = math.radians(-183.0 + (zone * 6.0)) - return cmeridian - -# FootpointLatitude -# Computes the footpoint latitude for use in converting transverse -# Mercator coordinates to ellipsoidal coordinates -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. 
-# Inputs: -# y - The UTM northing coordinate, in meters. -# Returns: -# The footpoint latitude, in radians. -def FootpointLatitude(y): - # Precalculate n (Eq. 10.18) - n = (sm_a - sm_b) / (sm_a + sm_b) - # Precalculate alpha_ (Eq. 10.22) - # (Same as alpha in Eq. 10.17) - alpha_ = ((sm_a + sm_b) / 2.0) * (1 + (math.pow (n, 2.0) / 4) + (math.pow (n, 4.0) / 64)) - # Precalculate y_ (Eq. 10.23) - y_ = y / alpha_ - # Precalculate beta_ (Eq. 10.22) - beta_ = (3.0 * n / 2.0) + (-27.0 * math.pow (n, 3.0) / 32.0) + (269.0 * math.pow (n, 5.0) / 512.0) - # Precalculate gamma_ (Eq. 10.22) - gamma_ = (21.0 * math.pow (n, 2.0) / 16.0) + (-55.0 * math.pow (n, 4.0) / 32.0) - # Precalculate delta_ (Eq. 10.22) - delta_ = (151.0 * math.pow (n, 3.0) / 96.0) + (-417.0 * math.pow (n, 5.0) / 128.0) - # Precalculate epsilon_ (Eq. 10.22) - epsilon_ = (1097.0 * math.pow (n, 4.0) / 512.0) - # Now calculate the sum of the series (Eq. 10.21) - result = y_ + (beta_ * math.sin (2.0 * y_)) + (gamma_ * math.sin (4.0 * y_)) + (delta_ * math.sin (6.0 * y_)) + (epsilon_ * math.sin (8.0 * y_)) - return result - -# MapLatLonToXY -# Converts a latitude/longitude pair to x and y coordinates in the -# Transverse Mercator projection. Note that Transverse Mercator is not -# the same as UTM; a scale factor is required to convert between them. -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# phi - Latitude of the point, in radians. -# lambda - Longitude of the point, in radians. -# lambda0 - Longitude of the central meridian to be used, in radians. -# Returns: -# Two values, x and y: x and y coordinates of computed point, not scaled. -def MapLatLonToXY(phi, lambda1, lambda0): - x = 0.0 - y = 0.0 - # Precalculate ep2 - ep2 = (math.pow(sm_a, 2.0) - math.pow(sm_b, 2.0)) / math.pow(sm_b, 2.0) - # Precalculate nu2 - nu2 = ep2 * math.pow(math.cos(phi), 2.0) - # Precalculate N - N = math.pow(sm_a, 2.0) / (sm_b * math.sqrt(1 + nu2)) - # Precalculate t - t = math.tan(phi) - t2 = t * t - tmp = (t2 * t2 * t2) - math.pow(t, 6.0) - # Precalculate l - l = lambda1 - lambda0 - # Precalculate coefficients for l**n in the equations below - # so a normal human being can read the expressions for easting - # and northing - # -- l**1 and l**2 have coefficients of 1.0 - l3coef = 1.0 - t2 + nu2 - l4coef = 5.0 - t2 + 9 * nu2 + 4.0 * (nu2 * nu2) - l5coef = 5.0 - 18.0 * t2 + (t2 * t2) + 14.0 * nu2 - 58.0 * t2 * nu2 - l6coef = 61.0 - 58.0 * t2 + (t2 * t2) + 270.0 * nu2 - 330.0 * t2 * nu2 - l7coef = 61.0 - 479.0 * t2 + 179.0 * (t2 * t2) - (t2 * t2 * t2) - l8coef = 1385.0 - 3111.0 * t2 + 543.0 * (t2 * t2) - (t2 * t2 * t2) - # Calculate easting (x) - x = N * math.cos (phi) * l + (N / 6.0 * math.pow (math.cos (phi), 3.0) * l3coef * math.pow (l, 3.0)) + (N / 120.0 * math.pow (math.cos (phi), 5.0) * l5coef * math.pow (l, 5.0)) + (N / 5040.0 * math.pow (math.cos (phi), 7.0) * l7coef * math.pow (l, 7.0)) - # Calculate northing (y) - y = ArcLengthOfMeridian (phi) + (t / 2.0 * N * math.pow (math.cos (phi), 2.0) * math.pow (l, 2.0)) + (t / 24.0 * N * math.pow (math.cos (phi), 4.0) * l4coef * math.pow (l, 4.0)) + (t / 720.0 * N * math.pow (math.cos (phi), 6.0) * l6coef * math.pow (l, 6.0)) + (t / 40320.0 * N * math.pow (math.cos (phi), 8.0) * l8coef * math.pow (l, 8.0)) - return x,y - -# MapXYToLatLon -# TODO: Function not fixed for Python-ness 28/06/2011 -# Converts x and y coordinates in the Transverse Mercator projection to -# a latitude/longitude pair. 
Note that Transverse Mercator is not -# the same as UTM; a scale factor is required to convert between them. -# Reference: Hoffmann-Wellenhof, B., Lichtenegger, H., and Collins, J., -# GPS: Theory and Practice, 3rd ed. New York: Springer-Verlag Wien, 1994. -# Inputs: -# x - The easting of the point, in meters. -# y - The northing of the point, in meters. -# lambda0 - Longitude of the central meridian to be used, in radians. -# Outputs: -# philambda - A 2-element containing the latitude and longitude -# in radians. -# Returns: -# The function does not return a value. -# Remarks: -# The local variables Nf, nuf2, tf, and tf2 serve the same purpose as -# N, nu2, t, and t2 in MapLatLonToXY, but they are computed with respect -# to the footpoint latitude phif. -# -# x1frac, x2frac, x2poly, x3poly, etc. are to enhance readability and -# to optimize computations. -def MapXYToLatLon(x, y, lambda0, philambda): - # Get the value of phif, the footpoint latitude. - phif = FootpointLatitude (y) - # Precalculate ep2 - ep2 = (math.pow (sm_a, 2.0) - math.pow (sm_b, 2.0)) / math.pow (sm_b, 2.0) - # Precalculate cos (phif) - cf = math.cos (phif) - # Precalculate nuf2 - nuf2 = ep2 * math.pow (cf, 2.0) - # Precalculate Nf and initialize Nfpow - Nf = math.pow (sm_a, 2.0) / (sm_b * math.sqrt (1 + nuf2)) - Nfpow = Nf - # Precalculate tf - tf = math.tan (phif) - tf2 = tf * tf - tf4 = tf2 * tf2 - # Precalculate fractional coefficients for x**n in the equations - # below to simplify the expressions for latitude and longitude. - x1frac = 1.0 / (Nfpow * cf) - Nfpow *= Nf # now equals Nf**2 - x2frac = tf / (2.0 * Nfpow) - Nfpow *= Nf # now equals Nf**3 - x3frac = 1.0 / (6.0 * Nfpow * cf) - Nfpow *= Nf # now equals Nf**4 - x4frac = tf / (24.0 * Nfpow) - Nfpow *= Nf # now equals Nf**5 - x5frac = 1.0 / (120.0 * Nfpow * cf) - Nfpow *= Nf # now equals Nf**6 - x6frac = tf / (720.0 * Nfpow) - Nfpow *= Nf # now equals Nf**7 - x7frac = 1.0 / (5040.0 * Nfpow * cf) - Nfpow *= Nf # now equals Nf**8 - x8frac = tf / (40320.0 * Nfpow) - # Precalculate polynomial coefficients for x**n. - # -- x**1 does not have a polynomial coefficient. - x2poly = -1.0 - nuf2 - x3poly = -1.0 - 2 * tf2 - nuf2 - x4poly = 5.0 + 3.0 * tf2 + 6.0 * nuf2 - 6.0 * tf2 * nuf2 - 3.0 * (nuf2 *nuf2) - 9.0 * tf2 * (nuf2 * nuf2) - x5poly = 5.0 + 28.0 * tf2 + 24.0 * tf4 + 6.0 * nuf2 + 8.0 * tf2 * nuf2 - x6poly = -61.0 - 90.0 * tf2 - 45.0 * tf4 - 107.0 * nuf2 + 162.0 * tf2 * nuf2 - x7poly = -61.0 - 662.0 * tf2 - 1320.0 * tf4 - 720.0 * (tf4 * tf2) - x8poly = 1385.0 + 3633.0 * tf2 + 4095.0 * tf4 + 1575 * (tf4 * tf2) - # Calculate latitude - philambda[0] = phif + x2frac * x2poly * (x * x) + x4frac * x4poly * math.pow (x, 4.0) + x6frac * x6poly * math.pow (x, 6.0) + x8frac * x8poly * math.pow (x, 8.0) - # Calculate longitude - philambda[1] = lambda0 + x1frac * x + x3frac * x3poly * math.pow (x, 3.0) + x5frac * x5poly * math.pow (x, 5.0) + x7frac * x7poly * math.pow (x, 7.0) - return - -# LatLonToUTMXY -# Converts a latitude/longitude pair to x and y coordinates in the -# Universal Transverse Mercator projection. -# Inputs: -# lat - Latitude of the point, in degrees. -# lon - Longitude of the point, in degrees. -# zone - UTM zone to be used for calculating values for x and y. -# If zone is less than 1 or greater than 60, the routine -# will determine the appropriate zone from the value of lon. -# Outputs: -# xy - A 2-element array where the UTM x and y values will be stored. -# Returns: -# The UTM zone used for calculating the values of x and y. 
-def LatLonToUTMXY(lat, lon, zone): - east,north = MapLatLonToXY(math.radians(lat), math.radians(lon), UTMCentralMeridian (zone)) - # Adjust easting and northing for UTM system. - # magic number on the easting (500000) is the false easting - east = east * UTMScaleFactor + 500000.0 - north = north * UTMScaleFactor - # this is used to add the false northing for southern hemisphere values - if (north < 0.0): - north = north + 10000000.0 - return east,north - -# UTMXYToLatLon -# Converts x and y coordinates in the Universal Transverse Mercator -# projection to a latitude/longitude pair. -# Inputs: -# x - The easting of the point, in meters. -# y - The northing of the point, in meters. -# zone - The UTM zone in which the point lies. -# southhemi - True if the point is in the southern hemisphere; -# false otherwise. -# Outputs: -# latlon - A 2-element array containing the latitude and -# longitude of the point, in radians. -# Returns: -# The function does not return a value. -def UTMXYToLatLon(x, y, zone, southhemi, latlon): - x -= 500000.0 - x /= UTMScaleFactor - # If in southern hemisphere, adjust y accordingly. - if (southhemi): - y -= 10000000.0 - y /= UTMScaleFactor - cmeridian = UTMCentralMeridian (zone) - MapXYToLatLon (x, y, cmeridian, latlon) - return - -#------------------------------------------------------------------------------ -# Methods other than function projection stuff down here -#------------------------------------------------------------------------------ -# GET_MAG_FIELD -# This is a method which gets the magnetic field things we need -# Needs a latitude and longitude and an 'epoch' - time we want the mag field for -# A few URLs can be used to get this -# http://www.ngdc.noaa.gov/geomag/magfield.shtml -# http://www.ga.gov.au/oracle/geomag/agrfform.jsp -# example of the GA one, using AGRF -# http://www.ga.gov.au/bin/geoAGRF?latd=-24&latm=00&lats=00&lond=135&lonm=00&lons=00&elev=0&year=2010&month=01&day=1&Ein=D -# -#TODO: Make it a bit more awesome -def get_mag_field(lat, lon, year, month, day): - #some defaults so it doesn't fall over - declination = 0.0 - inclination = 0.0 - intensity = 50000.0 - #for AGRF call we need decimal degrees turned into lats and lons - latd,latm,lats = decdeg2dms(lat) - lond,lonm,lons = decdeg2dms(lon) - #assume zero elevation - elev = 0 - #successive formatting of URL to make it a bit easier to read - #urlencode doesn't work very well for some reason, but this handcoded way does - #base URL for AGRF online calculation as of 1 July 2011 - base_url = 'http://www.ga.gov.au/bin/geoAGRF?' - #latitude stuff - full_url = base_url + 'latd=' + str(latd) + '&latm=' + str(latm) + '&lats=' + str(lats) - #longitude stuff - full_url = full_url + '&lond=' + str(lond) + '&lonm=' + str(lonm) + '&lons=' + str(lons) - #elevation stuff - full_url = full_url + '&elev=' + str(elev) - #epoch stuff - full_url = full_url + '&year=' + str(year) + '&month=' + str(month) + '&day=' + str(day) - #We want three components - D is declination, I is inclination, F is total field strength - full_url = full_url + '&Ein=D&Ein=I&Ein=F' - - #debugging: what URL are we retrieving? - print 'Retrieving the following URL: ' + full_url - #open the URL, read its full contents into a variable - f = urllib.urlopen(full_url) - agrf_page_contents = f.read() - - #now we need to extract the small section of the page we're looking for - #As of 1 July 2011, it is bounced by
Magnetic Field Components and a newline -    #Find the start string, and 41 characters to this position index to strip out -    #the Magnetic Field Components sentence & formatting characteristics -    start_index = agrf_page_contents.find(' Magnetic Field Components ') + 41 -    #End index is easier to define -    end_index = agrf_page_contents.find('\n -    #Extract the text between the two indices we defined above -    components_contents = agrf_page_contents[start_index:end_index] -    #Now we can split them with a newline and delimiter -    #Will provide 3 'component' strings -    for component in components_contents.split('\n
'): - #Check the first character and remove the leading characters - #and convert the extracted text to a float - #D means declination... - if component[0:1] == 'D': - declination = float(component[4:-3]) - #I means inclination... - if component[0:1] == 'I': - inclination = float(component[4:-3]) - #and F means total field intensity - if component[0:1] == 'F': - intensity = float(component[4:-3]) - return declination,inclination,intensity - -# DECDEG2DMS -# Converts a decimal degree number into degrees, minutes and seconds. -def decdeg2dms(dd): - mnt,sec = divmod(dd*3600,60) - deg,mnt = divmod(mnt,60) - return deg,mnt,sec - -#------------------------------------------------------------------------------ -# Methods other than function projection stuff down here -#------------------------------------------------------------------------------ -# GET_MAG_FIELD_DATA -# This is a method which gets the magnetic field things we need -# Needs a latitude and longitude and an 'epoch' - time we want the mag field for -# A few URLs can be used to get this -# http://www.ngdc.noaa.gov/geomag/magfield.shtml -# http://www.ga.gov.au/oracle/geomag/agrfform.jsp -# example of the GA one, using AGRF -# http://www.ga.gov.au/bin/geoAGRF?latd=-24&latm=00&lats=00&lond=135&lonm=00&lons=00&elev=0&year=2010&month=01&day=1&Ein=D -# -#TODO: Make it a bit more awesome -def get_mag_field_data(lat, lon, year, month, day): - #some defaults so it doesn't fall over - declination = 0.0 - inclination = 0.0 - intensity = 50000.0 - #for AGRF call we need decimal degrees turned into lats and lons - latd,latm,lats = decdeg2dms(lat) - lond,lonm,lons = decdeg2dms(lon) - #assume zero elevation - elev = 0 - #successive formatting of URL to make it a bit easier to read - #urlencode doesn't work very well for some reason, but this handcoded way does - #base URL for AGRF online calculation as of 1 July 2011 - base_url = 'http://www.ga.gov.au/bin/geoAGRF?' - #latitude stuff - full_url = base_url + 'latd=' + str(latd) + '&latm=' + str(latm) + '&lats=' + str(lats) - #longitude stuff - full_url = full_url + '&lond=' + str(lond) + '&lonm=' + str(lonm) + '&lons=' + str(lons) - #elevation stuff - full_url = full_url + '&elev=' + str(elev) - #epoch stuff - full_url = full_url + '&year=' + str(year) + '&month=' + str(month) + '&day=' + str(day) - #We want three components - D is declination, I is inclination, F is total field strength - full_url = full_url + '&Ein=D&Ein=I&Ein=F' - - #debugging: what URL are we retrieving? - print 'Retrieving the following URL: ' + full_url - #open the URL, read its full contents into a variable - f = urllib.urlopen(full_url) - agrf_page_contents = f.read() - - #now we need to extract the small section of the page we're looking for - #As of 1 July 2011, it is bounced by
Magnetic Field Components and a newline -    #Find the start string, and 41 characters to this position index to strip out -    #the Magnetic Field Components sentence & formatting characteristics -    start_index = agrf_page_contents.find(' Magnetic Field Components ') + 41 -    #End index is easier to define -    end_index = agrf_page_contents.find('\n -    #Extract the text between the two indices we defined above -    components_contents = agrf_page_contents[start_index:end_index] -    #Now we can split them with a newline and delimiter -    #Will provide 3 'component' strings -    for component in components_contents.split('\n
'): - #Check the first character and remove the leading characters - #and convert the extracted text to a float - #D means declination... - if component[0:1] == 'D': - declination = float(component[4:-3]) - #I means inclination... - if component[0:1] == 'I': - inclination = float(component[4:-3]) - #and F means total field intensity - if component[0:1] == 'F': - intensity = float(component[4:-3]) - return declination,inclination,intensity -# autogenerated function definition -def main(): - # ------------ VEGL - Step 1 --------- - f = file(VEGLParams.getInputCsvFile(), "r") - input_csv = csv.reader(f) - data = [] - lineCount = 0 # The first 2 lines contain text and must be skipped - for strX, strY, strZ in input_csv: - if lineCount > 1: - x = float(strX) - y = float(strY) - z = float(strZ) - data.append([x,y,z]) - lineCount = lineCount + 1 - - # ------------------------------------ - - VEGLSelectedBox = VEGLParams.getSelectedBounds() - zone = int(VEGLSelectedBox.getSrs()) - temp_data = [] - for x, y, z in data: - newX, newY = project(x, y, zone) - temp_data.append([newX, newY, z]) - data = temp_data - - temp_data = [] - for x, y, z in data: - # isPointInsideArea happens to read northings then eastings, and we store - # northings as y, and eastings as - if VEGLSelectedBox.isPointInsideArea(y,x): - temp_data.append([x,y,z]) - data = temp_data - - # If we have a magnetic inversion, we need to define the magnetic field properties. - # General convention is to assign the magnetic properties associated with the - # middle of the inversion area. - # - central_lat = (VEGLSelectedBox.getMaxNorthing() + VEGLSelectedBox.getMinNorthing()) / 2 - central_lon = (VEGLSelectedBox.getMaxEasting() + VEGLSelectedBox.getMinEasting()) / 2 - declination,inclination,intensity = get_mag_field_data(central_lat, central_lon,2010,01,01) - - # UBC-GIF needs a data file in a specific format. - # We need to define a filename ('obs_filename'). - # This includes writing out expected errors in the data, number of data points etc. - print 'Time to write out a data file' - obs_file = 'temp_ubc_obs.asc' - f = file(obs_file, 'w') - f.write(str(len(data)) + '\t! Number of points\n') - # For each data point, write out: Easting, Northing, Elevation, Data, Error - # In this simple example, we assume elevation is 1 m, and error are 2 mGal / nT - for east,north,prop in data: - elevation = 1.0 - error = 2.0 - f.write(str(east) + ' ' + str(north) + ' ' + str(elevation) + ' ' + str(prop) + ' ' + str(error) + '\n') - f.close() - # Step 6: calculate some meshy stuff - # --- Scientific description below --- - # Defines the mesh parameters and writes out a UBC-GIF mesh file. - # Mesh is defined by the minimum and maximum eastings and northings, inversion depth, and respective cell sizes. 
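Illustration (station values invented): if two points survive the bounding-box filter, say (500100.0, 7000100.0, -23.5) and (500200.0, 7000300.0, 41.2), the observation-file block above writes temp_ubc_obs.asc with a first line of '2' followed by a tab and '! Number of points', then one line per station:
500100.0 7000100.0 1.0 -23.5 2.0
500200.0 7000300.0 1.0 41.2 2.0
with the elevation column fixed at 1.0 and the error column fixed at 2.0, exactly as hard-coded in the loop.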
- # Mesh file name: 'mesh' - minEasting = VEGLSelectedBox.getMinEasting() - maxEasting = VEGLSelectedBox.getMaxEasting() - minNorthing = VEGLSelectedBox.getMinNorthing() - maxNorthing = VEGLSelectedBox.getMaxNorthing() - invDepth = VEGLParams.getInversionDepth() - cell_x = VEGLParams.getCellX() - cell_y = VEGLParams.getCellY() - cell_z = VEGLParams.getCellZ() - num_x_cells = int((maxEasting - minEasting) / cell_x) - num_y_cells = int((maxNorthing - minNorthing) / cell_y) - num_z_cells = int(invDepth / cell_z) - print 'Number of cells in x dimension: ' + str(num_x_cells) + ', number of cells in y dimension: ' + str(num_y_cells) + ' and number of cells in z dimension: ' + str(num_z_cells) - # Define mesh file name here - mesh = 'mesh.msh' - try: - f = file(mesh, 'w') - f.write(str(num_x_cells) + ' ' + str(num_y_cells) + ' ' + str(num_z_cells) + '\n') - f.write(str(minEasting) + ' ' + str(minNorthing) + ' 0\n') - f.write(str(num_x_cells) + '*' + str(cell_x) + '\n') - f.write(str(num_y_cells) + '*' + str(cell_y) + '\n') - f.write(str(num_z_cells) + '*' + str(cell_z)) - f.close() - except IOError, e: - print e - sys.exit(1) - - # Step 7: Write out sensitivity analsysis control file - # --- Scientific description below --- - # There are two parts to running a UBC-GIF inversion. The first involves a sensitivity analysis; - # here we write out the appropriate control files for this analysis. - # File names for things defined outside this method are defined at the top - obs_file = 'temp_ubc_obs.asc' - mesh = 'mesh.msh' - # Sensitivity analysis (*sen3d_MPI) input file - sns_inp = 'sens.inp' - - # Write some files - try: - f = file(sns_inp, 'w') - f.write(mesh + ' ! mesh\n') - f.write(obs_file + ' ! observations file\n') - f.write('null ! topography\n') - f.write('2 ! iwt\n') - f.write('null ! beta, znot\n') - f.write('daub2 ! wavelet\n') - f.write('2 1e-4 ! itol eps\n') - f.close() - - except IOError, e: - print e - sys.exit(1) - - # Step 8: Write out inversion control file - # --- Scientific description below --- - # In the second part to running a UBC-GIF inversion, we need to write out - # the control file for the actual inversion. - # File names for things defined outside this method are defined at the top - obs_file = 'temp_ubc_obs.asc' - inv_inp = 'inv.inp' - try: - f = file(inv_inp, 'w') - f.write('0 !irest\n') - f.write('1 !mode\n') - f.write('1 0.02 !par tolc\n') - f.write(obs_file + ' ! observations file\n') - # file name dependant on type set in JS - f.write('mzinv3d.mtx\n') - f.write('null !initial model\n') - f.write('null !reference model\n') - f.write('null !active cell file\n') - f.write('null !lower, upper bounds\n') - f.write('null Le, Ln, Lz\n') - f.write('SMOOTH_MOD\n') - f.write('null !weighting file\n') - f.write('0\n') - f.close() - except IOError, e: - print e - sys.exit(1) - - # step 9: finalise stuff - I guess this is where we execute two commands - # At a guess, they are the two commented-out lines below? 
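For the two command strings assembled just below, ${n-threads} is substituted by the portal at submission time. With an assumed value of 8 (illustrative only), they would expand to:
mpirun -np 8 --mca btl self,sm /opt/ubc/gzsen3d_MPI sens.inp > sens.out
mpirun -np 8 --mca btl self,sm /opt/ubc/gzinv3d_MPI inv.inp > inv.out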
- # Control files, defined elsewhere - sns_inp = 'sens.inp' - sns_out = 'sens.out' - inv_inp = 'inv.inp' - inv_out = 'inv.out' - sensitivity_command = 'mpirun -np ${n-threads} --mca btl self,sm /opt/ubc/gzsen3d_MPI ' + sns_inp + ' > ' + sns_out - inversion_command = 'mpirun -np ${n-threads} --mca btl self,sm /opt/ubc/gzinv3d_MPI ' + inv_inp + ' > ' + inv_out - print 'Sensitivity command: ' + sensitivity_command - print 'Inversion command: ' + inversion_command - sys.stdout.flush() - retcode = subprocess.call(sensitivity_command, shell=True) - print 'sensitivity returned: ' + str(retcode) - sys.stdout.flush() - retcode = subprocess.call(inversion_command, shell=True) - print 'inversion returned: ' + str(retcode) - sys.stdout.flush() - # Upload our logging outs - cloudUpload(sns_out, sns_out) - cloudUpload(inv_out, inv_out) - # Upload the mesh file - cloudUpload(mesh, mesh) - # Upload gravity or magnetic data file - denFiles = glob.glob('*zinv3d*.den') - preFiles = glob.glob('*zinv3d*.pre') - # Upload Final Model + Prediction - print 'Uploading final model and prediction' - invFilesToUpload = [] - if len(denFiles) > 0: - denFiles.sort() - invFilesToUpload.append(denFiles[len(denFiles) - 1]) - if len(preFiles) > 0: - preFiles.sort() - invFilesToUpload.append(preFiles[len(preFiles) - 1]) - print 'About to upload the following files:' - print invFilesToUpload - for invFile in invFilesToUpload: - cloudUpload(invFile, invFile) - -# autogenerated main definition -if __name__ == "__main__": - main() - diff --git a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/underworld-gocad.py b/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/underworld-gocad.py deleted file mode 100644 index 22973b9ce..000000000 --- a/src/main/resources/org/auscope/portal/server/scriptbuilder/templates/underworld-gocad.py +++ /dev/null @@ -1,73 +0,0 @@ -""" - The following are additional unused uwGocadGAMaterialsModelGenerator.sh parameters - - --defaultConductivity=(1.) Default thermal conductivity to be used in regions where voxets do not specify a value. - --defaultHeatProduction=(0.) Default heat production to be used in regions where voxets do not specify a value. - --minX=(FromVoxet) Minimum domain X coordinate. If available, default is value from voxet definition. - --maxX=(FromVoxet) Maximum domain X coordinate. If available, default is value from voxet definition. - --minY=(FromVoxet) Minimum domain Y coordinate. If available, default is value from voxet definition. - --maxY=(FromVoxet) Maximum domain Y coordinate. If available, default is value from voxet definition. - --minZ=(FromVoxet) Minimum domain Z coordinate. If available, default is value from voxet definition. - --maxZ=(FromVoxet) Maximum domain Z coordinate. If available, default is value from voxet definition. - - The following are the descriptions for the remaining uwGocadGAMaterialsModelGenerator.sh parameters - - --useLowerFluxBC Use a lower boundary heat flux (Neumann) boundary condition? - --name=(GAGocadGeothermal) A name for your simulation - --voxetFilename= Filename for the voxet dataset - --elementResI=(16) Number of FEM elements along X axis - --elementResJ=(16) Number of FEM elements along Y axis - --elementResK=(16) Number of FEM elements along Z axis - --lowerBoundaryTemp=(300) Lower boundary (minZ) temperature BC value. This value is ignored if --useLowerFluxBC is set. 
- --upperBoundaryTemp=(0) Upper boundary (maxZ) temperature BC value - --lowerBoundaryFlux=(0.0125) Lower boundary (minZ) heat flux value. This value is ignored if --useLowerFluxBC is not set. - --nprocs=(1) Number of processors to run job with - --bin=(/usr/local/underworld/ImporteUnderworld binary executable, including absolute path - --materialsPropName= Name of voxet property corresponding to the material definitions - --keyFilename= Name of your materials key CSV file. Data be in format: Name, Index, Conductivity, HeatProduction. First line a header. - -""" - - -import subprocess, csv, math, os, sys, urllib, glob; -import logging - - -def cloudUpload(inFilePath, cloudKey): - cloudBucket = os.environ["STORAGE_BUCKET"] - cloudDir = os.environ["STORAGE_BASE_KEY_PATH"] - queryPath = (cloudBucket + "/" + cloudDir + "/" + cloudKey).replace("//", "/") - retcode = subprocess.call(["cloud", "upload", cloudKey, inFilePath, "--set-acl=public-read"]) - print "cloudUpload: " + inFilePath + " to " + queryPath + " returned " + str(retcode) - -#Build the default list of arguments -argList = ["uwGocadGAMaterialsModelGenerator.sh", "--voxetFilename=${voxet-filename}", "--materialsPropName=${materials-property}", "--keyFilename=${voxet-key}", "--nprocs=${n-threads}", "--run"] - -#Add optional arguments -lowerBoundaryFlux = "${lower-boundary-flux}" -lowerBoundaryTemp = "${lower-boundary-temp}" -if (len(lowerBoundaryFlux) > 0): - argList.append("--useLowerFluxBC") - argList.append("--lowerBoundaryFlux=" + lowerBoundaryFlux) - -if (len(lowerBoundaryTemp) > 0): - argList.append("--lowerBoundaryTemp=" + lowerBoundaryTemp) - -argList.append("--elementResI=${n-fem-x}") -argList.append("--elementResJ=${n-fem-y}") -argList.append("--elementResK=${n-fem-z}") - - -#Begin processing -print argList -retcode = subprocess.call(argList) -print "result: " + str(retcode) - -#Upload results directory -dirName = "output_GAGocadGeothermal/" -print "Fetching output files" -outputFiles = glob.glob(dirName + "*") -print outputFiles -for invFile in outputFiles: - cloudUpload(invFile, invFile.replace(dirName, "")) -print "Done" diff --git a/src/main/resources/org/auscope/portal/server/web/controllers/vl-bootstrap.sh b/src/main/resources/org/auscope/portal/server/web/controllers/vl-bootstrap.sh deleted file mode 100644 index cfec3e5d3..000000000 --- a/src/main/resources/org/auscope/portal/server/web/controllers/vl-bootstrap.sh +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env bash -# chkconfig: 2345 90 10 -# description: vl-bootstrap.sh - Shell Script for managing the download and running of the VL portal workflow script -# It is expected that the the VL Portal will customise this script with appropriate values for each job - -# Some constants -export VGL_BOOTSTRAP_VERSION="2" -export WORKING_DIR="/root" -export WORKFLOW_SCRIPT="$WORKING_DIR/vl.sh" -export SHUTDOWN_SCRIPT="$WORKING_DIR/vl-shutdown.sh" - -# These will be replaced with hardcoded values by the VL Portal (varying for each job) -export STORAGE_BUCKET="{0}" -export STORAGE_BASE_KEY_PATH="{1}" -export STORAGE_ACCESS_KEY="{2}" -export STORAGE_SECRET_KEY="{3}" -export WORKFLOW_URL="{4}" -export STORAGE_ENDPOINT="{5}" -export STORAGE_TYPE="{6}" -export STORAGE_AUTH_VERSION="{7}" -export OS_REGION_NAME="{8}" -export SHUTDOWN_URL="{10}" -export WALLTIME="{11}" -export VL_LOG_FILE_NAME="vl.sh.log" -export VL_LOG_FILE="$WORKING_DIR/$VL_LOG_FILE_NAME" -export VL_TERMINATION_FILE_NAME="vl.end" -export VL_TERMINATION_FILE="$WORKING_DIR/$VL_TERMINATION_FILE_NAME" - -# Load our 
profile so this run is the same as a regular user login (to make debugging easier) -source /etc/profile - -echo "------ VL Bootstrap Script ---------" -echo " " -echo "------ Provisioning ----------" -echo "" -{9} -echo "" -echo "------ Printing Environment ----------" -echo "VL_BOOTSTRAP_VERSION = $VL_BOOTSTRAP_VERSION" -echo "WORKING_DIR = $WORKING_DIR" -echo "WORKFLOW_URL = $WORKFLOW_URL" -echo "WORKFLOW_SCRIPT = $WORKFLOW_SCRIPT" -echo "PATH = $PATH" -echo "LD_LIBRARY_PATH = $LD_LIBRARY_PATH" -echo "STORAGE_ENDPOINT = $STORAGE_ENDPOINT" -echo "STORAGE_TYPE = $STORAGE_TYPE" -echo "VL_LOG_FILE = $VL_LOG_FILE" -if [ "$WALLTIME" -gt 0 ]; then - echo "SHUTDOWN_SCRIPT = $SHUTDOWN_SCRIPT" - echo "WALLTIME = $WALLTIME" -else - echo "Walltime not set or set to 0" -fi - -echo "VL_TERMINATION_FILE = $VL_TERMINATION_FILE" -echo "--------------------------------------" - -# If a walltime is present, set walltime shutdown parameters -if [ "$WALLTIME" -gt 0 ]; then - #Download shutdown script and make it executable - echo "Downloading shutdown script from $SHUTDOWN_URL and storing it at $SHUTDOWN_SCRIPT" - curl -f -L "$SHUTDOWN_URL" -o "$SHUTDOWN_SCRIPT" - echo "curl result $?" - echo "Making $SHUTDOWN_SCRIPT executable" - chmod +x "$SHUTDOWN_SCRIPT" - echo "chmod result $?" - at -f $SHUTDOWN_SCRIPT now + $WALLTIME min | tee -a "$VL_LOG_FILE" -else - echo "Skipping download of walltime shutdown script" - echo "Job will execute without walltime limit" -fi - -#Download our workflow and make it executable -echo "Downloading workflow script from $WORKFLOW_URL and storing it at $WORKFLOW_SCRIPT" -curl -f -L "$WORKFLOW_URL" -o "$WORKFLOW_SCRIPT" -echo "curl result $?" -echo "Making $WORKFLOW_SCRIPT executable" -chmod +x "$WORKFLOW_SCRIPT" -echo "chmod result $?" - -echo "executing workflow script $WORKFLOW_SCRIPT" - -# If we have unbuffer - lets use that to get stdout as it gets written -# otherwise we just get the buffered version -if command -v unbuffer > /dev/null 2>&1 ; then - unbuffer $WORKFLOW_SCRIPT 2>&1 | tee -a "$VL_LOG_FILE" -else - $WORKFLOW_SCRIPT 2>&1 | tee -a "$VL_LOG_FILE" -fi - - - diff --git a/src/main/resources/org/auscope/portal/server/web/controllers/vl-cloudformation.json.tpl b/src/main/resources/org/auscope/portal/server/web/controllers/vl-cloudformation.json.tpl deleted file mode 100644 index 4963d625c..000000000 --- a/src/main/resources/org/auscope/portal/server/web/controllers/vl-cloudformation.json.tpl +++ /dev/null @@ -1,333 +0,0 @@ -{ - "AWSTemplateFormatVersion": "2010-09-09", - "Metadata": { - "AWS::CloudFormation::Designer": { - "e0560b9a-0f01-4f0f-b8f9-6914d40d922c": { - "size": { - "width": 60, - "height": 60 - }, - "position": { - "x": 270, - "y": 110 - }, - "z": 0, - "embeds": [], - "isassociatedwith": [ - "4eb02d3e-4a98-43fa-953c-ac0ab2114d3e" - ] - }, - "9d50e08c-5955-4bee-b1b4-77891f305c1a": { - "source": { - "id": "46ea9ac6-9838-436b-ad98-c81dff7992ef" - }, - "target": { - "id": "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - }, - "z": 1 - }, - "f8a870e9-1ab3-4be2-a75b-407ab2a8f9ad": { - "size": { - "width": 60, - "height": 60 - }, - "position": { - "x": 310, - "y": 190 - }, - "z": 0, - "embeds": [], - "isassociatedwith": [ - "7dd8e728-2bd1-401f-a0fb-94d304415d4e" - ] - }, - "98215be4-bacc-40fc-ac50-327117b3b1ef": { - "source": { - "id": "8d56cc5c-9650-4fc1-9d3e-92d7f1fb91e3" - }, - "target": { - "id": "f8a870e9-1ab3-4be2-a75b-407ab2a8f9ad" - }, - "z": 11 - }, - "4eb02d3e-4a98-43fa-953c-ac0ab2114d3e": { - "size": { - "width": 60, - "height": 60 - }, - "position": { - "x": 
130, - "y": 110 - }, - "z": 0, - "embeds": [] - }, - "5397347e-d4aa-4bcd-95af-a68e1c1155a2": { - "source": { - "id": "4eb02d3e-4a98-43fa-953c-ac0ab2114d3e" - }, - "target": { - "id": "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - }, - "z": 11 - }, - "1588908d-a70c-41fc-9867-c965314844cf": { - "source": { - "id": "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - }, - "target": { - "id": "4eb02d3e-4a98-43fa-953c-ac0ab2114d3e" - }, - "z": 12 - }, - "0800cdda-d910-4063-908f-f883147151a6": { - "size": { - "width": 60, - "height": 60 - }, - "position": { - "x": 400, - "y": 110 - }, - "z": 0, - "embeds": [], - "isassociatedwith": [ - "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - ] - }, - "1cfb8483-d86b-4745-9a69-f2922b92f06a": { - "source": { - "id": "0800cdda-d910-4063-908f-f883147151a6" - }, - "target": { - "id": "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - }, - "z": 11 - }, - "7dd8e728-2bd1-401f-a0fb-94d304415d4e": { - "size": { - "width": 60, - "height": 60 - }, - "position": { - "x": 130, - "y": 190 - }, - "z": 0, - "embeds": [] - }, - "47295e75-6f30-4212-a122-a94654b804e2": { - "source": { - "id": "f8a870e9-1ab3-4be2-a75b-407ab2a8f9ad" - }, - "target": { - "id": "7dd8e728-2bd1-401f-a0fb-94d304415d4e" - }, - "z": 11 - }, - "5ea78b1b-2eab-4665-b6a1-dbbbcc8160ff": { - "source": { - "id": "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - }, - "target": { - "id": "4eb02d3e-4a98-43fa-953c-ac0ab2114d3e" - }, - "z": 11 - } - } - }, - "Resources": { - "AnvglStsRole": { - "Type": "AWS::IAM::Role", - "Properties": { - "AssumeRolePolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::${awsAccount}" - }, - "Action": "sts:AssumeRole", - "Condition": { - "StringEquals": { - "sts:ExternalId": "${awsSecret}" - } - } - } - ] - }, - "ManagedPolicyArns": [ - { - "Ref": "AnvglStsPolicy" - } - ] - }, - "Metadata": { - "AWS::CloudFormation::Designer": { - "id": "f8a870e9-1ab3-4be2-a75b-407ab2a8f9ad" - } - } - }, - "AnvglS3Policy": { - "Type": "AWS::IAM::ManagedPolicy", - "Properties": { - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "Stmt1453960831000", - "Effect": "Allow", - "Action": [ - "s3:*" - ], - "Resource": [ - "arn:aws:s3:::${s3Bucket}*" - ] - } - ] - } - }, - "Metadata": { - "AWS::CloudFormation::Designer": { - "id": "4eb02d3e-4a98-43fa-953c-ac0ab2114d3e" - } - } - }, - "AnvglS3InstanceProfile": { - "Type": "AWS::IAM::InstanceProfile", - "Properties": { - "Roles": [ - { - "Ref": "AnvglS3Role" - } - ] - }, - "Metadata": { - "AWS::CloudFormation::Designer": { - "id": "0800cdda-d910-4063-908f-f883147151a6" - } - } - }, - "AnvglStsPolicy": { - "Type": "AWS::IAM::ManagedPolicy", - "Properties": { - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": [ - "iam:PassRole" - ], - "Resource": [ - "arn:aws:iam::*:role/*-AnvglS3Role-*" - ] - }, - { - "Effect": "Allow", - "Action": [ - "ec2:CreateTags", - "ec2:DeleteTags", - "ec2:GetConsoleOutput", - "ec2:DescribeInstanceStatus", - "ec2:ModifyInstanceAttribute", - "ec2:DescribeImages" - ], - "Resource": [ - "*" - ] - }, - { - "Effect": "Allow", - "Action": [ - "ec2:RunInstances", - "ec2:StartInstances", - "ec2:StopInstances", - "ec2:TerminateInstances" - ], - "Resource": [ - "arn:aws:ec2:*:*:image/*", - "arn:aws:ec2:*:*:instance/*", - "arn:aws:ec2:*:*:network-interface/*", - "arn:aws:ec2:*:*:security-group/*", - "arn:aws:ec2:*:*:key-pair/*", - "arn:aws:ec2:*:*:subnet/*", - "arn:aws:ec2:*:*:volume/*" - ] - }, - { - "Effect": "Allow", - "Action": [ - 
"s3:*" - ], - "Resource": [ - "arn:aws:s3:::${s3Bucket}*" - ] - } - ] - } - }, - "Metadata": { - "AWS::CloudFormation::Designer": { - "id": "7dd8e728-2bd1-401f-a0fb-94d304415d4e" - } - } - }, - "AnvglS3Role": { - "Type": "AWS::IAM::Role", - "Properties": { - "AssumeRolePolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Principal": { - "Service": "ec2.amazonaws.com" - }, - "Action": "sts:AssumeRole" - } - ] - }, - "ManagedPolicyArns": [ - { - "Ref": "AnvglS3Policy" - } - ] - }, - "Metadata": { - "AWS::CloudFormation::Designer": { - "id": "e0560b9a-0f01-4f0f-b8f9-6914d40d922c" - } - } - } - }, - "Outputs": { - "AnvglAccessARN": { - "Description": "ARN for the role which is used to access the client AWS account.", - "Value": { "Fn::Join": [ - "", - [ - "arn:aws:iam::", - { "Ref": "AWS::AccountId" }, - ":role/", - { "Ref": "AnvglStsRole" } - ] - ] - } - }, - "JobInstanceARN": { - "Description": "ARN for the instance profile which is used when processing user jobs.", - "Value": { "Fn::Join": [ - "", - [ - "arn:aws:iam::", - { "Ref": "AWS::AccountId" }, - ":instance-profile/", - { "Ref": "AnvglS3InstanceProfile" } - ] - ] - } - } - } -} \ No newline at end of file diff --git a/src/main/resources/org/auscope/portal/server/web/controllers/vl-provisioning.sh b/src/main/resources/org/auscope/portal/server/web/controllers/vl-provisioning.sh deleted file mode 100644 index 1dc188fd3..000000000 --- a/src/main/resources/org/auscope/portal/server/web/controllers/vl-provisioning.sh +++ /dev/null @@ -1,185 +0,0 @@ -# Shell script template for provisioning a portal vm if required -# -# Content taken from installPuppet-{centos,debian}.sh. -# -# -# ///////////////////////////// -# ANVGL Portal Custom Modules - download from user specified GIT (or default) -# -# Edit these variables if you need to download from a different git -# repo/branch. -# -# ///////////////////////////// - -# baseUrl -- git repository url -baseUrl="https://github.com/AuScope/ANVGL-Portal.git" - -# branch -- branch in the git repo -branch="master" - -# pathSuffix -- path to puppet modules in the repo -pathSuffix="/vm/puppet/modules/" - -# Install puppet itself if not already available -if hash puppet 2>/dev/null; then - echo "Puppet version $(puppet --version ) already installed." - if [ -f /etc/debian_version ]; then - sudo apt-get update - sudo apt-get install -y --force-yes at - else - sudo rpm -ivh http://yum.puppetlabs.com/el/6/products/x86_64/puppetlabs-release-6-7.noarch.rpm - sudo yum install -y at - fi -else - # Determine what OS we're using so we install appropriately - # Checks for a debian based system, or assumes rpm based - if [ -f /etc/debian_version ]; then - sudo apt-get update - sudo apt-get install -y --force-yes puppet at - else - sudo rpm -ivh http://yum.puppetlabs.com/el/6/products/x86_64/puppetlabs-release-6-7.noarch.rpm - yum install -y puppet at - fi -fi - -# Pip -if hash pip 2>/dev/null; then - echo "Pip already installed." -else - # Determine what OS we're using so we install appropriately - # Checks for a debian based system, or assumes rpm based - if [ -f /etc/debian_version ]; then - sudo apt-get install -y python-pip - else - yum install -y python-pip - fi -fi - -# Swift -if hash swift 2>/dev/null; then - echo "Swift already installed." 
-else - # Determine what OS we're using so we install appropriately - # Checks for a debian based system, or assumes rpm based - if [ -f /etc/debian_version ]; then - sudo apt-get install -y python-swiftclient - else - yum install -y python-swiftclient - fi -fi - -#sudo sh -c 'echo " server = master.local" >> /etc/puppet/puppet.conf' -#sudo service puppet restart -#sudo chkconfig puppet on - -#///////////////////////////// -#Install Additional Modules -#///////////////////////////// -# Puppet simply reports already installed modules, so this is safe -# Puppet Forge Modules -puppet module install stahnma/epel -if [ $? -ne 0 ] -then - echo "Failed to install puppet module stahnma/epel" - exit 1 -fi - -puppet module install example42/puppi -if [ $? -ne 0 ] -then - echo "Failed to install puppet module example42/puppi" - exit 1 -fi - -puppet module install jhoblitt/autofsck -if [ $? -ne 0 ] -then - echo "Failed to install puppet module jhoblitt/autofsck" - exit 1 -fi - -#///////////////////////////// -# Clone specified git repository into $tmpModulesDir and install puppet modules. -# -# First checks whether the vl modules are already available. -#///////////////////////////// - -# Directory where vl modules will be installed -# Should we install into the "code" subdir? -moduleDirPrefix="/etc/puppet/code" -if [ ! -d "$moduleDirPrefix" ]; then - # Ensure /etc/puppet exists, and try installing modules into that - moduleDirPrefix="/etc/puppet" - if [ ! -d "$moduleDirPrefix" ]; then - echo "/etc/puppet directory does not exist, puppet install not found." - exit 1 - fi -fi - -# Ensure the modules subdirectory exists -moduleDir="$moduleDirPrefix/modules" -if [ ! -d "$moduleDir" ]; then - echo "Creating the 'modules' subdirectory for puppet." - sudo mkdir -p "$moduleDir" -fi - -if [ ! -d "$moduleDir/vl_common" ]; then - echo "Installing vl common modules into $moduleDir/vl_common" - if [ -f /etc/debian_version ]; then - sudo apt-get install -y --force-yes wget git - else - sudo yum install -y wget git - fi - - # Assumes our temp dir does not already have content! - tmpModulesDir="/opt/vgl/modules" - if [ "$1" != "" ] - then - baseUrl="$1" - fi - if [ "$2" != "" ] - then - pathSuffix="$2" - fi - - #Ensure suffix doesn't start with a '/' - if [ `head -c 2 <<< "$pathSuffix"` != "/" ] - then - pathSuffix=`tail -c +2 <<< "$pathSuffix"` - fi - - # Clone the git repository into $tmpModulesDir so we can extract the - # puppet modules. Make sure to use the correct branch! - mkdir -p "$tmpModulesDir" - git clone "$baseUrl" "$tmpModulesDir" - cd "$tmpModulesDir" - git checkout "$branch" - - #Now copy the modules to the puppet module install directory - cp -r "$tmpModulesDir/$pathSuffix/vl_common" "$moduleDir" - if [ $? -ne 0 ] - then - echo "Failed copying to puppet module directory - aborting" - exit 2 - fi - - # Don't tidy up until we're sure this approach works with cloud-init - # # Tidy up - # rm -rf "$tmpModulesDir" -else - echo "Common vl modules found in $moduleDir/vl_common" -fi - -# ///////////////////////////// -# Make sure we are provisioned -# ///////////////////////////// - -# cd back out of the deleted directory to avoid issues with puppet application -cd "${WORKING_DIR}" - -# Apply puppet modules -# TODO: template this so the portal can pass in provisioning from SSC -puppet apply <

Disclaimer

diff --git a/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-download.job.tpl b/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-download.job.tpl deleted file mode 100644 index 8af9b1ba2..000000000 --- a/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-download.job.tpl +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash -#PBS -P {0} -#PBS -q copyq -#PBS -l walltime={4} -#PBS -l mem=300MB -#PBS -l ncpus=1 -#PBS -l wd -#PBS -l storage=scratch/{0}+gdata/{0} -#PBS -j oe -#PBS -N vl{1} -#PBS -o {3}/.download.log - -# This batch file is expected to be copied into and then run directly from the VL_OUTPUT_DIR -# It is responsible for downloading all remote data services into the working directory -# And then submitting the actual run job - -export VL_PROJECT_ID="{0}" -export VL_JOB_ID="{1}" -export VL_WORKING_DIR="{2}" -export VL_OUTPUT_DIR="{3}" -export VL_TERMINATION_FILE="$VL_OUTPUT_DIR/vl.end" -export VL_JOBID_FILE="$VL_OUTPUT_DIR/.jobid" -export VL_WORKFLOW_VERSION="1" - -echo "#### Download Environment start ####" -env | sort -echo "#### Download Environment end ####" - -source nci-util.sh - -echo $PBS_JOBID > $VL_JOBID_FILE - -# Set our workflow version to indicate that the job is running -echo "$VL_WORKFLOW_VERSION" > "$VL_OUTPUT_DIR/workflow-version.txt" || finish 2 "ERROR: Set workflow version in $VL_WORKING_DIR/workflow-version.txt" - -# Create our working directory to receive downloaded data -mkdir -p "$VL_WORKING_DIR" || finish 2 "ERROR: Unable to create $VL_WORKING_DIR" -cp "$VL_OUTPUT_DIR/vl-download.sh" "$VL_WORKING_DIR" || finish 2 "ERROR: Unable to copy $VL_OUTPUT_DIR/vl-download.sh to $VL_WORKING_DIR" -cd "$VL_WORKING_DIR" || finish 2 "ERROR: Unable to access $VL_WORKING_DIR" - -echo "#### Download start ####" -downloadStartTime=`date +%s` -source vl-download.sh -downloadEndTime=`date +%s` -totalDownloadTime=`expr $downloadEndTime - $downloadStartTime` -echo "Total download time was `expr $totalDownloadTime / 3600` hour(s), `expr $totalDownloadTime % 3600 / 60` minutes and `expr $totalDownloadTime % 60` seconds" -echo "#### Download end ####" - -# Submit our actual processing job -cd "$VL_OUTPUT_DIR" || finish 2 "ERROR: Unable to return to $VL_OUTPUT_DIR" -RAWID=`qsub nci-run.job` || finish 3 "ERROR: Unable to submit nci-run.job" -echo "$'{'RAWID%.*'}'" > $VL_JOBID_FILE diff --git a/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-run.job.tpl b/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-run.job.tpl deleted file mode 100644 index 941a8f6ba..000000000 --- a/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-run.job.tpl +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash -#PBS -P {0} -#PBS -q normal -#PBS -l walltime={4} -#PBS -l mem={6} -#PBS -l jobfs={7} -#PBS -l ncpus={5} -#PBS -l wd -#PBS -l storage=scratch/{0}+gdata/{0}{10} -#PBS -j oe -#PBS -N vl{1} -#PBS -o {3}/.run.log - -# This batch file is expected to be copied into and then run directly from the VL_OUTPUT_DIR -# It is responsible for running the users job script in an environment with all modules loaded -# and data downloaded. 
The script will write all pertinent output files to VL_OUTPUT_DIR - -#Redirect all output to our log file (after preserving the current contents) -echo "stdout/stderr to be redirected to {3}/vl.sh.log" -DL_LOG_CONTENT=`cat "{3}/.download.log"` -echo "" > "{3}/vl.sh.log" -exec >> "{3}/vl.sh.log" -exec 2>&1 -echo "$DL_LOG_CONTENT" - -export VL_PROJECT_ID="{0}" -export VL_JOB_ID="{1}" -export VL_WORKING_DIR="{2}" -export VL_OUTPUT_DIR="{3}" -export VL_TERMINATION_FILE="$VL_OUTPUT_DIR/vl.end" -export VL_TOTAL_CPU_COUNT="{5}" -if [ `expr $VL_TOTAL_CPU_COUNT % 16` -eq "0" ]; then - export VL_TOTAL_NODES=`expr $VL_TOTAL_CPU_COUNT / 16` -else - export VL_TOTAL_NODES=`expr $VL_TOTAL_CPU_COUNT / 16 + 1` -fi -export VL_CPUS_PER_NODE=`expr $VL_TOTAL_CPU_COUNT / $VL_TOTAL_NODES` - -echo "#### Compute Environment start ####" -env | sort -echo "#### Compute Environment end ####" - -source nci-util.sh - -# Move our working data to the job node file system -cp -r "$VL_OUTPUT_DIR/." "$VL_WORKING_DIR" || finish 2 "ERROR: Unable to copy data from $VL_OUTPUT_DIR to working dir at $VL_WORKING_DIR" -cd "$VL_WORKING_DIR" || finish 2 "ERROR: Unable to access working directory at $VL_WORKING_DIR" - -# Load Modules -module purge -{8} - -## Emulate our "cloud" command line tool -#export PATH="$VL_WORKING_DIR:$PATH" -#echo ''#!/bin/bash'' > cloud -#echo ''cp "$3" "$VL_OUTPUT_DIR/$2"'' >> cloud -#chmod +x cloud - -# Run User Script -echo "#### Python start ####" -computeStartTime=`date +%s` -{9} "vl_script.py" -computeEndTime=`date +%s` -echo "#### Python end ####" - -echo "#### Compute Time start ####" -totalComputeTime=`expr $computeEndTime - $computeStartTime` -echo "Total compute time was `expr $totalComputeTime / 3600` hour(s), `expr $totalComputeTime % 3600 / 60` minutes and `expr $totalComputeTime % 60` seconds" -echo "#### Compute Time end ####" - -# Tidy up -finish 0 "INFO: Finished run job" diff --git a/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-util.sh b/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-util.sh deleted file mode 100644 index 9067bf85f..000000000 --- a/src/main/resources/org/auscope/portal/server/web/service/cloud/nci-util.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -export VL_WORKFLOW_VERSION="1" - -function finish { - if [ -d "$VL_WORKING_DIR" ] && [ ${#VL_WORKING_DIR} -gt 4 ] - then - rm -rf "$VL_WORKING_DIR" - fi - - date > "$VL_TERMINATION_FILE" - - if [ "$2" != "" ] - then - log "$2" - fi - - exit $1 -} - -function log { - timestamp="`date +%Y-%m-%d:%H:%M:%S`" - echo "$PBS_JOBID $timestamp $@" -} - -function cloud { - if [ "$1" == "upload" ] - then - cp -r "$3" "$VL_OUTPUT_DIR/$2" || finish 3 "ERROR: Unable to copy file $3 to output directory $VL_OUTPUT_DIR/$2" - fi -} diff --git a/src/main/resources/org/auscope/portal/server/web/service/monitor/templates/job-completion.tpl b/src/main/resources/org/auscope/portal/server/web/service/monitor/templates/job-completion.tpl deleted file mode 100644 index 40d42ce88..000000000 --- a/src/main/resources/org/auscope/portal/server/web/service/monitor/templates/job-completion.tpl +++ /dev/null @@ -1,24 +0,0 @@ -Dear ${userName}, - -Your VGL job has finished processing. 
- -Job details: ------------- -Job Status: ${status} -Job id: ${jobId} -Job folder: ${seriesName} -Job name: ${jobName} -Job description: ${jobDescription} -Date & time submitted: ${dateSubmitted} -Date & time processed (est.): ${dateProcessed} -Time elapsed (est.): ${timeElapsed} - -Please go to ${portalUrl} to download or publish your job results. - -Yours sincerely, -${emailSender} - -Job execution log: ------------------- -${jobExecLogSnippet} - diff --git a/src/test/java/org/auscope/portal/jmock/VEGLJobMatcher.java b/src/test/java/org/auscope/portal/jmock/VEGLJobMatcher.java deleted file mode 100644 index 764375cad..000000000 --- a/src/test/java/org/auscope/portal/jmock/VEGLJobMatcher.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.auscope.portal.jmock; - -import org.auscope.portal.server.vegl.VEGLJob; -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; - -/** - * Matcher for matching VEGLJob objects - * @author Josh Vote - * - */ -public class VEGLJobMatcher extends TypeSafeMatcher{ - private Integer id; - private boolean requireMismatch; - - public VEGLJobMatcher(Integer id) { - this.id = id; - this.requireMismatch = false; - } - - public VEGLJobMatcher(Integer id, boolean requireMismatch) { - this.id = id; - this.requireMismatch = requireMismatch; - } - - @Override - public void describeTo(Description description) { - if (requireMismatch) { - description.appendText(String.format("a VEGLJob without id='%1$s'", id)); - } else { - description.appendText(String.format("a VEGLJob with id='%1$s'", id)); - } - } - - @Override - public boolean matchesSafely(VEGLJob job) { - if (id == null && job.getId() == null) { - return !requireMismatch; - } - - if ((id == null) ^ (job.getId() == null)) { - return requireMismatch; - } - - return requireMismatch ^ (id.intValue() == job.getId().intValue()); - } - -} diff --git a/src/test/java/org/auscope/portal/jmock/VEGLSeriesMatcher.java b/src/test/java/org/auscope/portal/jmock/VEGLSeriesMatcher.java deleted file mode 100644 index 1fde37cc9..000000000 --- a/src/test/java/org/auscope/portal/jmock/VEGLSeriesMatcher.java +++ /dev/null @@ -1,72 +0,0 @@ -package org.auscope.portal.jmock; - -import org.auscope.portal.server.vegl.VEGLSeries; -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; - -/** - * A JUnit Matcher for matching VEGLSeries objects - * @author vot002 - * - */ -public class VEGLSeriesMatcher extends TypeSafeMatcher { - - private String user; - private String name; - private String description; - - /** - * Creates a new matcher that will only match a VEGLSeries object with specified - * name, user and description - * @param user Can be null - * @param name Can be null - * @param description Can be null - */ - public VEGLSeriesMatcher(String user, String name, String description) { - super(); - this.user = user; - this.name = name; - this.description = description; - } - - @SuppressWarnings("null") - private static boolean nullStringComparison(String s1, String s2) { - if ((s1 == null && s2 != null) || - (s1 != null && s2 == null)) { - return false; - } - - if (s1 == s2) { - return true; - } - - return s1.equals(s2); - } - - @Override - public boolean matchesSafely(VEGLSeries series) { - if (series == null) { - return false; - } - - if (!nullStringComparison(user, series.getUser())) { - return false; - } - - if (!nullStringComparison(name, series.getName())) { - return false; - } - - if (!nullStringComparison(description, series.getDescription())) { - return false; - } - - return true; - } - - @Override 
- public void describeTo(Description desc) { - desc.appendText(String.format("a VEGLSeries with user='%1$s' name='%2$s' description='%3$s'", user, name, description)); - } - -} diff --git a/src/test/java/org/auscope/portal/server/vegl/TestVEGLJobManager.java b/src/test/java/org/auscope/portal/server/vegl/TestVEGLJobManager.java deleted file mode 100644 index b11549f66..000000000 --- a/src/test/java/org/auscope/portal/server/vegl/TestVEGLJobManager.java +++ /dev/null @@ -1,246 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.util.Arrays; -import java.util.List; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.auscope.portal.server.web.service.VEGLJobService; -import org.auscope.portal.server.web.service.VEGLSeriesService; -import org.auscope.portal.server.web.service.VGLJobAuditLogService; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.dao.DataRetrievalFailureException; - -/** - * Unit tests for VEGLJobDao - * @author Richard Goh - */ -public class TestVEGLJobManager extends PortalTestClass { - - private VEGLJobManager jobManager; - - /* - private VEGLJobDao mockJobDao; - private VEGLSeriesDao mockSeriesDao; - private VGLJobAuditLogDao mockJobAuditLogDao; - private NCIDetailsDao mockNciDetailsDao; - */ - private VEGLJobService mockJobService; - private VEGLSeriesService mockSeriesService; - private VGLJobAuditLogService mockJobAuditLogService; - private NCIDetailsService mockNciDetailsService; - - /** - * Load our mock objects - */ - @Before - public void init() { - // Setting up mock objects needed for Object Under Test (OUT) - mockJobService = context.mock(VEGLJobService.class); - mockSeriesService = context.mock(VEGLSeriesService.class); - mockJobAuditLogService = context.mock(VGLJobAuditLogService.class); - mockNciDetailsService = context.mock(NCIDetailsService.class); - // Object Under Test - jobManager = new VEGLJobManager(); - jobManager.setVeglJobService(mockJobService); - jobManager.setVeglSeriesService(mockSeriesService); - jobManager.setVglJobAuditLogService(mockJobAuditLogService); - jobManager.setNciDetailsService(mockNciDetailsService); - } - - /** - * Tests that querying job series of a given criteria succeeds. - */ - @Test - public void testQuerySeries() { - final String user = "user@email.com"; - final String name = "user"; - final String desc = "series description"; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - final List seriesList = Arrays.asList(mockSeries); - - context.checking(new Expectations() {{ - oneOf(mockSeriesService).query(user, name, desc); - will(returnValue(seriesList)); - }}); - - Assert.assertNotNull(jobManager.querySeries(user, name, desc)); - } - - /** - * Tests that retrieving jobs of a given series succeeds. 
- * @throws PortalServiceException - */ - @Test - public void testGetSeriesJobs() throws PortalServiceException { - final int seriesId = 1; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final List jobList = Arrays.asList(mockJob); - final PortalUser user = new PortalUser(); - - context.checking(new Expectations() {{ - oneOf(mockJobService).getJobsOfSeries(seriesId, user);will(returnValue(jobList)); - oneOf(mockNciDetailsService).getByUser(user);will(returnValue(null)); - }}); - - Assert.assertNotNull(jobManager.getSeriesJobs(seriesId, user)); - } - - /** - * Tests that retrieving job of a given id succeeds. - * null is return when a job cannot be found. - */ - @Test - public void testGetJobById() throws Exception { - final int jobId1 = 1; - final int jobId2 = 2; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final PortalUser user = new PortalUser(); - final NCIDetails nciDetails = new NCIDetails(); - - nciDetails.setKey("mykey"); - nciDetails.setProject("myproj"); - nciDetails.setUsername("myuser"); - - context.checking(new Expectations() {{ - oneOf(mockJobService).get(jobId1, user);will(returnValue(mockJob)); - oneOf(mockJobService).get(jobId2, user);will(returnValue(null)); - oneOf(mockNciDetailsService).getByUser(user);will(returnValue(nciDetails)); - - oneOf(mockJob).setProperty(NCIDetails.PROPERTY_NCI_KEY, "mykey"); - oneOf(mockJob).setProperty(NCIDetails.PROPERTY_NCI_PROJECT, "myproj"); - oneOf(mockJob).setProperty(NCIDetails.PROPERTY_NCI_USER, "myuser"); - }}); - - Assert.assertNotNull(jobManager.getJobById(jobId1, user)); - Assert.assertNull(jobManager.getJobById(jobId2, user)); - } - - /** - * Tests that the deleting of a given job succeeds. - */ - @Test - public void testDeleteJob() { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobService).deleteJob(mockJob); - }}); - - jobManager.deleteJob(mockJob); - } - - /** - * Tests that retrieving series of a give id succeeds. - * null is returned when a series cannot be found. - */ - @Test - public void testGetSeriesById() { - final String userEmail= "dummy@dummy.comn"; - final int series1 = 1; - final int series2 = 2; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - oneOf(mockSeriesService).get(series1, userEmail); - will(returnValue(mockSeries)); - oneOf(mockSeriesService).get(series2, userEmail); - will(returnValue(null)); - }}); - - Assert.assertNotNull(jobManager.getSeriesById(series1, userEmail)); - // Test to ensure null is returned when user's signature - // cannot be found. - Assert.assertNull(jobManager.getSeriesById(series2, userEmail)); - } - - /** - * Tests that the storing of a given job succeeds. - */ - @Test - public void testSaveJob() { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobService).saveJob(mockJob); - }}); - - jobManager.saveJob(mockJob); - } - - /** - * Tests that creating job audit trail succeeds. 
- */ - @Test - public void testCreateJobAuditTrail() { - final String oldJobStatus = JobBuilderController.STATUS_UNSUBMITTED; - final VEGLJob mockCurJob = context.mock(VEGLJob.class); - final String message = "Job submitted"; - - context.checking(new Expectations() {{ - oneOf(mockCurJob).getId(); - will(returnValue(1)); - oneOf(mockCurJob).getStatus(); - will(returnValue(JobBuilderController.STATUS_PENDING)); - oneOf(mockJobAuditLogService).save(with(any(VGLJobAuditLog.class))); - }}); - - jobManager.createJobAuditTrail(oldJobStatus, mockCurJob, message); - } - - /** - * Tests that creating job audit trail fails. - */ - @Test - public void testCreateJobAuditTrail_Exception() { - final String oldJobStatus = JobBuilderController.STATUS_UNSUBMITTED; - final VEGLJob mockCurJob = context.mock(VEGLJob.class); - final String message = "Job submitted"; - - context.checking(new Expectations() {{ - oneOf(mockCurJob).getId(); - will(returnValue(1)); - oneOf(mockCurJob).getStatus(); - will(returnValue(JobBuilderController.STATUS_PENDING)); - oneOf(mockJobAuditLogService).save(with(any(VGLJobAuditLog.class))); - will(throwException(new DataRetrievalFailureException(""))); - }}); - - jobManager.createJobAuditTrail(oldJobStatus, mockCurJob, message); - } - - /** - * Tests that deleting a given series succeeds. - */ - @Test - public void testDeleteSeries() { - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - oneOf(mockSeriesService).delete(mockSeries); - }}); - - jobManager.deleteSeries(mockSeries); - } - - /** - * Tests that storing a given series succeeds. - */ - @Test - public void testSaveSeries() { - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - oneOf(mockSeriesService).save(mockSeries); - }}); - - jobManager.saveSeries(mockSeries); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/vegl/TestVGLJobStatusAndLogReader.java b/src/test/java/org/auscope/portal/server/vegl/TestVGLJobStatusAndLogReader.java deleted file mode 100644 index 62ed08153..000000000 --- a/src/test/java/org/auscope/portal/server/vegl/TestVGLJobStatusAndLogReader.java +++ /dev/null @@ -1,635 +0,0 @@ -package org.auscope.portal.server.vegl; - -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.io.IOUtils; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.cloud.CloudJob; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudComputeService.InstanceStatus; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.core.util.ResourceUtil; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.controllers.JobListController; -import org.auscope.portal.server.web.security.NCIDetails; -import org.auscope.portal.server.web.service.CloudSubmissionService; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.ui.ModelMap; - -/** - * Unit tests for VGLJobStatusAndLogReader. 
- * - * @author Richard Goh - */ -public class TestVGLJobStatusAndLogReader extends PortalTestClass { - private static final String USER_EMAIL = "dummy@dummy.com"; - private final String storageServiceId = "storage-service-id"; - private final String computeServiceId = "compute-service-id"; - private VEGLJobManager mockJobManager; - private CloudStorageService[] mockCloudStorageServices; - private CloudComputeService[] mockCloudComputeServices; - private VGLJobStatusAndLogReader jobStatLogReader; - private CloudSubmissionService mockCloudSubmissionService; - - @Before - public void init() { - mockJobManager = context.mock(VEGLJobManager.class); - mockCloudStorageServices = new CloudStorageService[] { context.mock(CloudStorageService.class) }; - mockCloudComputeServices = new CloudComputeService[] { context.mock(CloudComputeService.class) }; - mockCloudSubmissionService = context.mock(CloudSubmissionService.class); - context.checking(new Expectations() {{ - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - }}); - - jobStatLogReader = new VGLJobStatusAndLogReader(mockJobManager, - mockCloudStorageServices, mockCloudComputeServices); - jobStatLogReader.setCloudSubmissionService(mockCloudSubmissionService); - } - - /** - * Tests that the get job status method returns a pending - * status when the status is still pending. - * - * @throws Exception - */ - @Test - public void testGetJobStatus_PendingToPending() throws Exception { - final int mockJobId = 123; - final String mockJobStatus = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final CloudFileInformation[] jobPendingFiles = new CloudFileInformation[] { - new CloudFileInformation("key3/filename", 100L, "http://public.url3/filename"), - new CloudFileInformation("key3/filename2", 101L, "http://public.url3/filename2"), - }; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(mockJobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(mockJobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(mockJobStatus)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(mockJob));will(returnValue(jobPendingFiles)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - oneOf(mockCloudComputeServices[0]).getJobStatus(mockJob);will(returnValue(InstanceStatus.Pending)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(JobBuilderController.STATUS_PENDING, status); - } - - /** - * Tests that the get job status method returns a pending - * status when the status is still pending (even 
if it's started running) - * - * @throws Exception - */ - @Test - public void testGetJobStatus_PendingToPending_RunningVM() throws Exception { - final int mockJobId = 123; - final String mockJobStatus = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final CloudFileInformation[] jobPendingFiles = new CloudFileInformation[] { - new CloudFileInformation("key3/filename", 100L, "http://public.url3/filename"), - new CloudFileInformation("key3/filename2", 101L, "http://public.url3/filename2"), - }; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(mockJobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(mockJobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(mockJobStatus)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(mockJob));will(returnValue(jobPendingFiles)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - oneOf(mockCloudComputeServices[0]).getJobStatus(mockJob);will(returnValue(InstanceStatus.Running)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(JobBuilderController.STATUS_PENDING, status); - } - - /** - * Tests that the get job status method returns a error - * status when its underlying VM goes missing - * - * @throws Exception - */ - @Test - public void testGetJobStatus_PendingToError() throws Exception { - final int mockJobId = 123; - final String mockJobStatus = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final CloudFileInformation[] jobPendingFiles = new CloudFileInformation[] { - new CloudFileInformation("key3/filename", 100L, "http://public.url3/filename"), - new CloudFileInformation("key3/filename2", 101L, "http://public.url3/filename2"), - }; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(mockJobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(mockJobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(mockJobStatus)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(mockJob));will(returnValue(jobPendingFiles)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - 
allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - oneOf(mockCloudComputeServices[0]).getJobStatus(mockJob);will(returnValue(InstanceStatus.Missing)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(JobBuilderController.STATUS_ERROR, status); - } - - /** - * Tests that the get job status method returns active - * status when its status changes from pending to active. - * - * @throws Exception - */ - @Test - public void testGetJobStatus_PendingToActive() throws Exception { - final int mockJobId = 123; - final String mockJobStatus = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final CloudFileInformation[] jobActiveFiles = new CloudFileInformation[] { - new CloudFileInformation("key2/filename", 100L, "http://public.url2/filename"), - new CloudFileInformation("key2/filename3", 102L, "http://public.url2/filename3"), - new CloudFileInformation("key2/workflow-version.txt", 102L, "http://public.url2/filename3"), - }; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(mockJobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(mockJobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(mockJobStatus)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(mockJob));will(returnValue(jobActiveFiles)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - allowing(mockJob).isWalltimeSet(); will(returnValue(false)); - - allowing(mockJob).getWalltime(); will(returnValue(null)); - oneOf(mockCloudComputeServices[0]).getJobStatus(mockJob);will(returnValue(InstanceStatus.Running)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(JobBuilderController.STATUS_ACTIVE, status); - } - - /** - * Tests that the get job status method returns done - * status when its status changes from pending to done. 
- * - * @throws Exception - */ - @Test - public void testGetJobStatus_PendingToDone() throws Exception { - final int mockJobId = 123; - final String mockJobStatus = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final CloudFileInformation[] jobDoneFiles = new CloudFileInformation[] { - new CloudFileInformation("key3/workflow-version.txt", 100L, "http://public.url3/filename"), - new CloudFileInformation("key3/filename2", 101L, "http://public.url3/filename2"), - new CloudFileInformation("key3/vl.end", 102L, "http://public.url3/filename3"), - }; - - final List downloads = new ArrayList<>(); - VglDownload download = new VglDownload(1); - download.setUrl("http://portal-uploads.vgl.org/file1"); - download.setName("file1"); - downloads.add(download); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(mockJobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(mockJobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(mockJobStatus)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).listJobFiles(with(mockJob));will(returnValue(jobDoneFiles)); - allowing(mockJob).getUser();will(returnValue("JaneNg")); - allowing(mockJob).getJobDownloads();will(returnValue(downloads)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - oneOf(mockCloudComputeServices[0]).getJobStatus(mockJob);will(returnValue(InstanceStatus.Missing)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(JobBuilderController.STATUS_DONE, status); - } - - /** - * Tests that the status of a completed or un-submitted job - * remains unchanged. 
- */ - @Test - public void testGetJobStatus_DoneOrUnsubmittedJob() { - final int jobId = 123; - final String job123Status = JobBuilderController.STATUS_DONE; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(jobId, "a", "b", "c", USER_EMAIL, "d", "e", "f");will(returnValue(mockJob)); - oneOf(mockJob).getId();will(returnValue(jobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(job123Status)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue("a")); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue("b")); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue("c")); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue("d")); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue("e")); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue("f")); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(job123Status, status); - } - - /** - * Tests that the get job status returns null - * when the job cannot be found in database. - */ - @Test - public void testGetJobStatus_JobDNE() { - final int jobId = 123; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(null)); - oneOf(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertNull(status); - } - - /** - * Tests that the status of a job remains unchanged - * when it doesn't have a storage service attached to it. 
- */ - @Test - public void testGetJobStatus_NoStorageService() { - final int jobId = 123; - final String job123Status = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(jobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(job123Status)); - allowing(mockJob).getStorageServiceId();will(returnValue("does-not-exist")); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(job123Status, status); - } - - /** - * Tests that the status of a job remains unchanged - * when an error occurred while the storage service is down. - * @throws Exception - */ - @Test - public void testGetJobStatus_StorageServiceError() throws Exception { - final int jobId = 123; - final String job123Status = JobBuilderController.STATUS_PENDING; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(jobId)); - allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(job123Status)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).listJobFiles(mockJob);will(throwException(new PortalServiceException("error"))); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(job123Status, status); - } - - /** - * Tests that the status of a job shifts to error if the job submission service loses a job - * @throws Exception - */ - @Test - public void testGetJobStatus_SubmissionServiceError() throws Exception { - final int jobId = 123; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - allowing(mockJob).getId();will(returnValue(jobId)); - 
allowing(mockJob).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob).getStatus();will(returnValue(JobBuilderController.STATUS_PROVISION)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - - oneOf(mockCloudSubmissionService).isSubmitting(mockJob, mockCloudComputeServices[0]);will(returnValue(false)); - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob); - Assert.assertEquals(JobBuilderController.STATUS_ERROR, status); - } - - /** - * Tests that a job updating underneath us due to the CloudSubmissionService won't erroneously - * shift a job to ERROR - * @throws Exception - */ - @Test - public void testGetJobStatus_SubmissionServiceTransition() throws Exception { - final int jobId = 123; - final VEGLJob mockJob1 = context.mock(VEGLJob.class, "mockJob1"); - final VEGLJob mockJob2 = context.mock(VEGLJob.class, "mockJob2"); - final CloudFileInformation[] jobActiveFiles = new CloudFileInformation[] { - new CloudFileInformation("key2/filename", 100L, "http://public.url2/filename"), - new CloudFileInformation("key2/filename3", 102L, "http://public.url2/filename3"), - new CloudFileInformation("key2/workflow-version.txt", 102L, "http://public.url2/filename3"), - }; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob1)); - allowing(mockJob1).getId();will(returnValue(jobId)); - allowing(mockJob1).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob1).getStatus();will(returnValue(JobBuilderController.STATUS_PROVISION)); - allowing(mockJob1).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob1).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob1).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob1).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob1).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob1).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob1).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob1).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); - - //Pretend our job is going to shift to Pending while we are checking everything. 
Make sure we catch it and then - //proceed with a normal Pending - Active job check - oneOf(mockCloudSubmissionService).isSubmitting(mockJob1, mockCloudComputeServices[0]);will(returnValue(false)); - oneOf(mockJobManager).getJobById(jobId, null, null, null, USER_EMAIL, null, null, null);will(returnValue(mockJob2)); - allowing(mockJob2).getStatus();will(returnValue(JobBuilderController.STATUS_PENDING)); - allowing(mockJob2).getId();will(returnValue(jobId)); - allowing(mockJob2).getEmailAddress();will(returnValue(USER_EMAIL)); - allowing(mockJob2).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob2).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(mockJob2));will(returnValue(jobActiveFiles)); - allowing(mockJob2).getProperty(CloudJob.PROPERTY_STS_ARN); will(returnValue(null)); - allowing(mockJob2).getProperty(CloudJob.PROPERTY_CLIENT_SECRET); will(returnValue(null)); - allowing(mockJob2).getProperty(CloudJob.PROPERTY_S3_ROLE); will(returnValue(null)); - allowing(mockJob2).getProperty(NCIDetails.PROPERTY_NCI_USER); will(returnValue(null)); - allowing(mockJob2).getProperty(NCIDetails.PROPERTY_NCI_PROJECT); will(returnValue(null)); - allowing(mockJob2).getProperty(NCIDetails.PROPERTY_NCI_KEY); will(returnValue(null)); -// allowing(mockJob2).getSubmitDate(); will(returnValue(new Date())); - allowing(mockJob2).isWalltimeSet(); will(returnValue(false)); - allowing(mockJob2).getWalltime(); will(returnValue(null)); - oneOf(mockCloudComputeServices[0]).getJobStatus(mockJob2);will(returnValue(InstanceStatus.Running)); - }}); - - String status = jobStatLogReader.getJobStatus(mockJob1); - Assert.assertEquals(JobBuilderController.STATUS_ACTIVE, status); - } - - /** - * Tests that log sectioning works as expected - * @throws Exception - */ - @Test - public void testGetSectionedLogs() throws Exception { - try (final InputStream logContents = ResourceUtil.loadResourceAsStream("sectionedVglLog.txt")) { - final String logContentString = IOUtils.toString(ResourceUtil.loadResourceAsStream("sectionedVglLog.txt"), "utf-8"); - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() { - { - allowing(mockJob).getStorageServiceId(); - will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getId(); - will(returnValue(storageServiceId)); - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, JobListController.VL_LOG_FILE); - will(returnValue(logContents)); - } - }); - - ModelMap map = jobStatLogReader.getSectionedLogs(mockJob); - - // There should be 3 sections (we don't care about line ending - // formats - normalise it to unix style \n) - Assert.assertEquals(4, map.keySet().size()); - Assert.assertEquals("contents of env\n", stripCarriageReturns(map.get("environment").toString())); - Assert.assertEquals("multiple\nlines\n", stripCarriageReturns(map.get("test").toString())); - Assert.assertEquals("text\n", stripCarriageReturns(map.get("spaced header").toString())); - Assert.assertEquals(stripCarriageReturns(logContentString), - stripCarriageReturns(map.get("Full").toString())); - } - } - - /** - * Tests that log sectioning fails as expected when log lookup fails - * @throws Exception - */ - @Test - public void testGetSectionedLogs_NoStorageService() { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - 
allowing(mockJob).getStorageServiceId();will(returnValue("does-not-exist")); - }}); - - try { - jobStatLogReader.getSectionedLogs(mockJob); - } catch (PortalServiceException ex) { - Assert.assertEquals("The specified job doesn't have a storage service.", ex.getMessage()); - Assert.assertEquals("Please ensure you have chosen a storage provider for the job.", ex.getErrorCorrection()); - } - } - - /** - * Tests that log sectioning fails as expected when log lookup fails at both the storage/compute service level - * @throws Exception - */ - @Test(expected=PortalServiceException.class) - public void testGetSectionedLogs_LogAccessErrorNull() throws Exception { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(1)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, JobListController.VL_LOG_FILE);will(throwException(new PortalServiceException("error"))); - oneOf(mockCloudComputeServices[0]).getConsoleLog(mockJob);will(returnValue(null)); - }}); - - jobStatLogReader.getSectionedLogs(mockJob); - } - - /** - * Tests that log sectioning fails as expected when log lookup fails at both the storage/compute service level - * @throws Exception - */ - @Test(expected=PortalServiceException.class) - public void testGetSectionedLogs_LogAccessErrorEx() throws Exception { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(1)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, JobListController.VL_LOG_FILE);will(throwException(new PortalServiceException("error"))); - oneOf(mockCloudComputeServices[0]).getConsoleLog(mockJob);will(throwException(new PortalServiceException("error"))); - }}); - - jobStatLogReader.getSectionedLogs(mockJob); - } - - /** - * Tests that log sectioning works when log lookup fails but compute lookup succeeds - * @throws Exception - */ - @Test - public void testGetSectionedLogs_LogAccessError_ComputeSuccess() throws Exception { - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String logContents = IOUtils.toString(ResourceUtil.loadResourceAsStream("sectionedVglLog.txt"), "utf-8"); - - context.checking(new Expectations() {{ - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(1)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - 
oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, JobListController.VL_LOG_FILE);will(throwException(new PortalServiceException("error"))); - oneOf(mockCloudComputeServices[0]).getConsoleLog(mockJob);will(returnValue(logContents)); - }}); - - String result = jobStatLogReader.getSectionedLog(mockJob, "environment"); - Assert.assertEquals("contents of env\n", stripCarriageReturns(result)); - } - - /** - * Tests that log sectioning works as expected - * @throws Exception - */ - @Test - public void testGetSectionedLogs_WithSectionName() throws Exception { - try (final InputStream logContents = ResourceUtil.loadResourceAsStream("sectionedVglLog.txt")) { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() { - { - allowing(mockJob).getStorageServiceId(); - will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getId(); - will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getJobFile(mockJob, JobListController.VL_LOG_FILE); - will(returnValue(logContents)); - } - }); - - String result = jobStatLogReader.getSectionedLog(mockJob, "environment"); - Assert.assertEquals("contents of env\n", stripCarriageReturns(result)); - } - } - - /** - * Tests that log sectioning works as expected - * @throws Exception - */ - @Test - public void testGetSectionedLogs_WithSectionNameError() throws Exception { - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(1)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, JobListController.VL_LOG_FILE);will(throwException(new PortalServiceException("error"))); - oneOf(mockCloudComputeServices[0]).getConsoleLog(mockJob);will(returnValue(null)); - }}); - - String result = jobStatLogReader.getSectionedLog(mockJob, "environment"); - Assert.assertNull(result); - } - - private static String stripCarriageReturns(String s) { - return s.replaceAll("\r", ""); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/vegl/TestVglDownload.java b/src/test/java/org/auscope/portal/server/vegl/TestVglDownload.java deleted file mode 100644 index f1d5fceb2..000000000 --- a/src/test/java/org/auscope/portal/server/vegl/TestVglDownload.java +++ /dev/null @@ -1,21 +0,0 @@ -package org.auscope.portal.server.vegl; - -import org.auscope.portal.core.test.PortalTestClass; -import org.junit.Assert; -import org.junit.Test; - -public class TestVglDownload extends PortalTestClass { - /** - * Tests equals and hashCode align - */ - @Test - public void testEquality() { - VglDownload dl1 = new VglDownload(1); - VglDownload dl2 = new VglDownload(1); - VglDownload dl3 = new VglDownload(2); - - Assert.assertTrue(equalsWithHashcode(dl1, dl2)); - Assert.assertTrue(equalsWithHashcode(dl2, dl1)); - Assert.assertFalse(equalsWithHashcode(dl1, dl3)); - } -} diff --git a/src/test/java/org/auscope/portal/server/vegl/TestVglJobParameter.java b/src/test/java/org/auscope/portal/server/vegl/TestVglJobParameter.java deleted file mode 100644 index 973162e53..000000000 --- 
a/src/test/java/org/auscope/portal/server/vegl/TestVglJobParameter.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.auscope.portal.server.vegl; - -import org.auscope.portal.core.test.PortalTestClass; -import org.junit.Assert; -import org.junit.Test; - -public class TestVglJobParameter extends PortalTestClass { - /** - * Unit test to ensure 'equal' objects return the same hashcode - */ - @Test - public void testEqualsMatchesHashcode() { - VEGLJob veglJob1 = new VEGLJob(); - veglJob1.setId(1); - VEGLJob veglJob2 = new VEGLJob(); - veglJob2.setId(1); - VEGLJob veglJob3 = new VEGLJob(); - veglJob3.setId(2); - VEGLJob veglJob4 = new VEGLJob(); - veglJob4.setId(1); - VEGLJob veglJob5 = new VEGLJob(); - veglJob5.setId(3); - VglParameter p1 = new VglParameter(1, "name1", "v1", "number", veglJob1); - VglParameter p2 = new VglParameter(2, "name1", "v2", "string", veglJob2); - VglParameter p3 = new VglParameter(3, "name1", "v3", "number", veglJob3); - VglParameter p4 = new VglParameter(4, "name2", "v4", "string", veglJob4); - VglParameter p5 = new VglParameter(5, "name3", "v5", "number", veglJob5); - - //Test general equality - Assert.assertTrue(equalsWithHashcode(p1, p1)); - Assert.assertTrue(equalsWithHashcode(p1, p2)); - Assert.assertFalse(equalsWithHashcode(p1, p3)); - Assert.assertFalse(equalsWithHashcode(p1, p4)); - Assert.assertFalse(equalsWithHashcode(p1, p5)); - Assert.assertFalse(equalsWithHashcode(p2, p3)); - Assert.assertFalse(equalsWithHashcode(p2, p4)); - Assert.assertFalse(equalsWithHashcode(p2, p5)); - Assert.assertFalse(equalsWithHashcode(p3, p4)); - Assert.assertFalse(equalsWithHashcode(p3, p5)); - Assert.assertFalse(equalsWithHashcode(p4, p5)); - } -} diff --git a/src/test/java/org/auscope/portal/server/vegl/mail/TestJobCompletionMailSender.java b/src/test/java/org/auscope/portal/server/vegl/mail/TestJobCompletionMailSender.java deleted file mode 100644 index 23a29c1ae..000000000 --- a/src/test/java/org/auscope/portal/server/vegl/mail/TestJobCompletionMailSender.java +++ /dev/null @@ -1,211 +0,0 @@ -package org.auscope.portal.server.vegl.mail; - -import java.util.Calendar; -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.Properties; - -import org.apache.velocity.app.VelocityEngine; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.core.test.jmock.SimpleMailMessageMatcher; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.hamcrest.Matcher; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.mail.SimpleMailMessage; -import org.springframework.mail.javamail.JavaMailSenderImpl; - -/** - * Unit tests for JobCompletionMailSender. - * - * @author Richard Goh - */ -public class TestJobCompletionMailSender extends PortalTestClass { - private VEGLJobManager mockJobManager; - private VGLJobStatusAndLogReader mockJobStatLogReader; - private JavaMailSenderImpl mockMailSender; - private JobCompletionMailSender jobCompMailSender; - private VEGLSeries mockSeries; - private VEGLJob mockJob; - private VelocityEngine velocityEngine; - private Date dateSubmitted = null; - private Date dateProcessed = null; - private Date dateExecuted = null; - - @Before - public void init() throws Exception { - //Mock objects to be used in the unit tests. 
- mockJobManager = context.mock(VEGLJobManager.class); - mockJobStatLogReader = context.mock(VGLJobStatusAndLogReader.class); - mockMailSender = context.mock(JavaMailSenderImpl.class); - mockSeries = context.mock(VEGLSeries.class); - mockJob = context.mock(VEGLJob.class); - - // Create actual Velocity engine needed for proper testing - Properties properties = new Properties(); - properties.setProperty("input.encoding", "UTF-8"); - properties.setProperty("output.encoding", "UTF-8"); - properties.setProperty("resource.loader", "class"); - properties.setProperty("class.resource.loader.class", - "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); - velocityEngine = new VelocityEngine(properties); - - //Global test variables to be used in all unit tests. - Calendar cal1 = new GregorianCalendar(2013, 2, 5, 12, 00, 00); - Calendar cal2 = new GregorianCalendar(2013, 2, 5, 12, 00, 10); - Calendar cal3 = new GregorianCalendar(2013, 2, 5, 12, 00, 55); - dateSubmitted = cal1.getTime(); - dateExecuted = cal2.getTime(); - dateProcessed = cal3.getTime(); - - //Create object under test with mock objects and set its required property fields. - jobCompMailSender = new JobCompletionMailSender(mockJobManager, mockJobStatLogReader, mockMailSender, velocityEngine); - jobCompMailSender.setTemplate("org/auscope/portal/server/web/service/monitor/templates/job-completion.tpl"); - jobCompMailSender.setDateFormat("EEE, d MMM yyyy HH:mm:ss"); - jobCompMailSender.setMaxLengthForSeriesNameInSubject(15); - jobCompMailSender.setMaxLengthForJobNameInSubject(15); - jobCompMailSender.setMaxLinesForTail(5); - jobCompMailSender.setEmailSender("test-admin@email.com"); - jobCompMailSender.setEmailSubject("VL Job (%s)"); - } - - /** - * jMock matcher used to compare SimpleMailMessage object. - */ - private static Matcher aSimpleMailMessage(String from, - String to, String subject, String text) { - return new SimpleMailMessageMatcher(from, to, subject, text); - } - - /** - * Tests that the content of email notification being generated - * contains information considered as essential. - */ - @Test - public void testConstructMailContent() { - final String user = "user@test.com"; - final int jobId = 123; - final String seriesName = "TestSeries#1"; - final String jobName = "TestJob#1"; - final String jobDescription = "Job#1Description"; - final String jobLog = "Line1\nLine2\nLine3\nLine4\nLine5\nLine6\nLine7"; - final String jobStatus="Done"; - - context.checking(new Expectations() {{ - allowing(mockJob).getSubmitDate();will(returnValue(dateSubmitted)); - allowing(mockJob).getExecuteDate();will(returnValue(dateExecuted)); - allowing(mockJob).getProcessDate();will(returnValue(dateProcessed)); - allowing(mockJob).getStatus();will(returnValue(jobStatus)); - allowing(mockJob).getUser();will(returnValue(user)); - oneOf(mockSeries).getName();will(returnValue(seriesName)); - oneOf(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getName();will(returnValue(jobName)); - oneOf(mockJob).getDescription();will(returnValue(jobDescription)); - - oneOf(mockJobStatLogReader).getSectionedLog(mockJob, "Python");will(returnValue(jobLog)); - }}); - - String content = jobCompMailSender.constructMailContent(mockSeries.getName(), mockJob); - //Email content shouldn't be null. - Assert.assertNotNull(content); - //Email body must contain user email alias, job id, series name and job name. 
- Assert.assertTrue(content.contains("user")); - Assert.assertTrue(content.contains(String.valueOf(jobId))); - Assert.assertTrue(content.contains(seriesName)); - Assert.assertTrue(content.contains(jobName)); - } - - /** - * Tests that the sending of job completion email notification succeeds. - */ - @Test - public void testSendMail() { - final String user = "user@test.com"; - final int jobId = 123; - final int seriesId = 1; - final String seriesName = "TestSeries#1abcdefgh"; - final String jobName = "TestJob#1abcdefghijk"; - final String jobDescription = "Job#1Description"; - final String jobLog = "Line1\nLine2\nLine3\nLine4\nLine5\nLine6\nLine7"; - final String jobNameInSubject = - jobName.substring(0, jobCompMailSender.getMaxLengthForJobNameInSubject()); - final String subject = String.format(jobCompMailSender.getEmailSubject(), jobNameInSubject); - final String jobStatus="Done"; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getSeriesById(seriesId, user);will(returnValue(mockSeries)); - allowing(mockJob).getSeriesId();will(returnValue(seriesId)); - - oneOf(mockSeries).getName();will(returnValue(seriesName)); - - oneOf(mockJob).getName();will(returnValue(jobName)); - allowing(mockJob).getEmailAddress();will(returnValue(user)); - - //The following expectations are for invoking constructMailContent method. - allowing(mockJob).getSubmitDate();will(returnValue(dateSubmitted)); - allowing(mockJob).getExecuteDate();will(returnValue(dateExecuted)); - allowing(mockJob).getProcessDate();will(returnValue(dateProcessed)); - allowing(mockJob).getUser();will(returnValue(user)); - allowing(mockJob).getStatus();will(returnValue(jobStatus)); - oneOf(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getName();will(returnValue(jobName)); - oneOf(mockJob).getDescription();will(returnValue(jobDescription)); - //Ensure we've one call to getSectionedLog to get Python execution log - oneOf(mockJobStatLogReader).getSectionedLog(mockJob, "Python");will(returnValue(jobLog)); - //Ensure we've one call to MailSender to send out job completion notification - oneOf(mockMailSender).send(with(aSimpleMailMessage(null, null, subject, null))); - }}); - - jobCompMailSender.sendMail(mockJob); - } - - /** - * Tests that failure or exception thrown in sending out - * email notification would not be propagated back to the - * caller. - */ - @Test - public void testSendMail_MailException() { - final String user = "user@test.com"; - final int jobId = 123; - final int seriesId = 1; - final String seriesName = "TestSeries#1abcdefgh"; - final String jobName = "TestJob#1abcdefghijk"; - final String jobDescription = "Job#1Description"; - final String jobLog = "Line1\nLine2\nLine3\nLine4\nLine5\nLine6\nLine7"; - final Exception sendMailEx = new Exception(); - final String jobStatus="Done"; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getSeriesById(seriesId, user);will(returnValue(mockSeries)); - allowing(mockJob).getSeriesId();will(returnValue(seriesId)); - - oneOf(mockSeries).getName();will(returnValue(seriesName)); - - oneOf(mockJob).getName();will(returnValue(jobName)); - oneOf(mockJob).getEmailAddress();will(returnValue(user)); - - //The following expectations are for invoking constructMailContent method. 
- allowing(mockJob).getSubmitDate();will(returnValue(dateSubmitted)); - allowing(mockJob).getExecuteDate();will(returnValue(dateExecuted)); - allowing(mockJob).getProcessDate();will(returnValue(dateProcessed)); - allowing(mockJob).getUser();will(returnValue(user)); - allowing(mockJob).getStatus();will(returnValue(jobStatus)); - allowing(mockJob).getId();will(returnValue(jobId)); - allowing(mockJob).getEmailAddress();will(returnValue(user)); - oneOf(mockJob).getName();will(returnValue(jobName)); - oneOf(mockJob).getDescription();will(returnValue(jobDescription)); - //Ensure we've one call to getSectionedLog to get Python execution log - oneOf(mockJobStatLogReader).getSectionedLog(mockJob, "Python");will(returnValue(jobLog)); - //Ensure we've one call to MailSender to send out job completion notification - oneOf(mockMailSender).send(with(any(SimpleMailMessage.class)));will(throwException(sendMailEx)); - }}); - - jobCompMailSender.sendMail(mockJob); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestBaseCloudController.java b/src/test/java/org/auscope/portal/server/web/controllers/TestBaseCloudController.java deleted file mode 100644 index 527530524..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestBaseCloudController.java +++ /dev/null @@ -1,139 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class TestBaseCloudController extends PortalTestClass { - /** - * Dummy Implementation to test abstract class - */ - private class TestableBaseCloudController extends BaseCloudController { - public TestableBaseCloudController( - CloudStorageService[] cloudStorageServices, - CloudComputeService[] cloudComputeServices) { - super(cloudStorageServices, cloudComputeServices, mockJobManager); - } - } - - CloudStorageService[] mockStorageServices = new CloudStorageService[] {context.mock(CloudStorageService.class, "css1"), - context.mock(CloudStorageService.class, "css2"), - context.mock(CloudStorageService.class, "css3")}; - - CloudComputeService[] mockComputeServices = new CloudComputeService[] {context.mock(CloudComputeService.class, "ccs1"), - context.mock(CloudComputeService.class, "ccs2"), - context.mock(CloudComputeService.class, "ccs3")}; - - VEGLJobManager mockJobManager = context.mock(VEGLJobManager.class); - - /** - * Configure mock services - */ - @Before - public void setup() { - context.checking(new Expectations() {{ - allowing(mockStorageServices[0]).getId();will(returnValue("oneId-s")); - allowing(mockStorageServices[1]).getId();will(returnValue("anotherId-s")); - allowing(mockStorageServices[2]).getId();will(returnValue("yetAnotherId-s")); - - allowing(mockComputeServices[0]).getId();will(returnValue("oneId-c")); - allowing(mockComputeServices[1]).getId();will(returnValue("anotherId-c")); - allowing(mockComputeServices[2]).getId();will(returnValue("yetAnotherId-c")); - }}); - } - - /** - * Tests getting a storage service works with a string - */ - @Test - public void testGetStorageService() { - TestableBaseCloudController controller = new TestableBaseCloudController(mockStorageServices, 
mockComputeServices); - - String existingId = "anotherId-s"; - String nonExistingId = "DNE"; - String nullId = null; - - CloudStorageService result = controller.getStorageService(existingId); - Assert.assertNotNull(result); - Assert.assertEquals(existingId, result.getId()); - - Assert.assertNull(controller.getStorageService(nonExistingId)); - Assert.assertNull(controller.getStorageService(nullId)); - } - - /** - * Tests getting a storage service works with a job - */ - @Test - public void testGetStorageService_Job() { - TestableBaseCloudController controller = new TestableBaseCloudController(mockStorageServices, mockComputeServices); - - String existingId = "anotherId-s"; - String nonExistingId = "DNE"; - String nullId = null; - - VEGLJob job = new VEGLJob(); - job.setId(123); - - job.setStorageServiceId(existingId); - CloudStorageService result = controller.getStorageService(job); - Assert.assertNotNull(result); - Assert.assertEquals(existingId, result.getId()); - - job.setStorageServiceId(nonExistingId); - Assert.assertNull(controller.getStorageService(job)); - job.setStorageServiceId(nullId); - Assert.assertNull(controller.getStorageService(job)); - } - - /** - * Tests getting a Compute service works with a string - */ - @Test - public void testGetComputeService() { - TestableBaseCloudController controller = new TestableBaseCloudController(mockStorageServices, mockComputeServices); - - String existingId = "anotherId-c"; - String nonExistingId = "DNE"; - String nullId = null; - - CloudComputeService result = controller.getComputeService(existingId); - Assert.assertNotNull(result); - Assert.assertEquals(existingId, result.getId()); - - Assert.assertNull(controller.getComputeService(nonExistingId)); - Assert.assertNull(controller.getStorageService(nullId)); - } - - /** - * Tests getting a storage service works with a job - * @throws Exception - */ - @Test - public void testGetComputeService_Job() { - TestableBaseCloudController controller = new TestableBaseCloudController(mockStorageServices, mockComputeServices); - - String existingId = "anotherId-c"; - String nonExistingId = "DNE"; - String nullId = null; - - VEGLJob job = new VEGLJob(); - job.setId(123); - - job.setComputeServiceId(existingId); - CloudComputeService result = controller.getComputeService(job); - Assert.assertNotNull(result); - Assert.assertEquals(existingId, result.getId()); - - job.setComputeServiceId(nonExistingId); - Assert.assertNull(controller.getComputeService(job)); - job.setComputeServiceId(nullId); - Assert.assertNull(controller.getComputeService(job)); - } -} diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestEncryptionService.java b/src/test/java/org/auscope/portal/server/web/controllers/TestEncryptionService.java deleted file mode 100644 index 6ef83e447..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestEncryptionService.java +++ /dev/null @@ -1,37 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.web.service.VGLCryptoService; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -/** - * Unit tests for MenuController - * - */ -public class TestEncryptionService extends PortalTestClass { - private VGLCryptoService uc = null; - final String PASSWORD = "testPassword1234"; - - @Before - public void setup() throws PortalServiceException { - uc= new VGLCryptoService(PASSWORD); - } 
- - /** - * Tests the existence of certain critical API keys + the correct view name being extracted - * @throws Exception - */ - @Test - public void testPasswordEncryption() throws Exception { - final String secret = "this is a secret for testing."; - - byte[] enc = uc.encrypt(secret); - - String dec = uc.decrypt(enc); - - Assert.assertEquals(secret, dec); - } -} diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestJobBuilderController.java b/src/test/java/org/auscope/portal/server/web/controllers/TestJobBuilderController.java deleted file mode 100644 index 3881bd466..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestJobBuilderController.java +++ /dev/null @@ -1,1765 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; -import jakarta.servlet.http.HttpSession; - -import org.apache.commons.collections.iterators.IteratorEnumeration; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.cloud.ComputeType; -import org.auscope.portal.core.cloud.MachineImage; -import org.auscope.portal.core.cloud.StagedFile; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageServiceJClouds; -import org.auscope.portal.core.services.cloud.STSRequirement; -import org.auscope.portal.core.util.ResourceUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.vegl.VglMachineImage; -import org.auscope.portal.server.vegl.VglParameter; -import org.auscope.portal.server.vegl.mail.JobMailSender; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.ANVGLFileStagingService; -import org.auscope.portal.server.web.service.ANVGLProvenanceService; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.CloudSubmissionService; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.auscope.portal.server.web.service.ScmEntryService; -import org.auscope.portal.server.web.service.monitor.VGLJobStatusChangeHandler; -import org.auscope.portal.server.web.service.scm.Solution; -import org.jmock.Expectations; -import org.jmock.Mockery; -import org.jmock.Sequence; -import org.jmock.imposters.ByteBuddyClassImposteriser; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.ui.ModelMap; -import org.springframework.web.multipart.MultipartHttpServletRequest; -import org.springframework.web.servlet.ModelAndView; - -/** - * Unit tests for JobBuilderController - * @author Josh Vote - * - */ -public class TestJobBuilderController { - private Mockery context = new Mockery() {{ - setImposteriser(ByteBuddyClassImposteriser.INSTANCE); - }}; - - 
private VEGLJobManager mockJobManager; - private CloudStorageServiceJClouds[] mockCloudStorageServices; - private CloudComputeService[] mockCloudComputeServices; - private HttpServletRequest mockRequest; - private HttpServletResponse mockResponse; - private HttpSession mockSession; - private PortalUser mockPortalUser; - private VGLJobStatusChangeHandler vglJobStatusChangeHandler; - private CloudSubmissionService mockCloudSubmissionService; - private PortalUserService mockUserService; - private ANVGLProvenanceService mockAnvglProvenanceService; - private ANVGLFileStagingService mockFileStagingService; - - //private NCIDetailsDao mockNciDetailsDao; - private NCIDetailsService mockNciDetailsService; - - private JobMailSender mockJobMailSender; - private VGLJobStatusAndLogReader mockVGLJobStatusAndLogReader; - private ScmEntryService mockScmEntryService; - - private JobBuilderController controller; - private final String vmSh = "http://example2.org"; - private final String vmShutdownSh = "http://example2.org"; - - private PortalUser user; - private VEGLJob job; - private final String jobId = "123"; - private final String userId = "456"; - private final String seriesId = "789"; - private VEGLJob mockJob; - private VEGLSeries mockSeries; - - @Before - public void init() { - //Mock objects required for Object Under Test - mockJobManager = context.mock(VEGLJobManager.class); - mockFileStagingService = context.mock(ANVGLFileStagingService.class); - mockPortalUser = context.mock(PortalUser.class); - mockCloudStorageServices = new CloudStorageServiceJClouds[] {context.mock(CloudStorageServiceJClouds.class)}; - mockCloudComputeServices = new CloudComputeService[] {context.mock(CloudComputeService.class)}; - mockRequest = context.mock(HttpServletRequest.class); - mockResponse = context.mock(HttpServletResponse.class); - mockSession = context.mock(HttpSession.class); - - //mockNciDetailsDao = context.mock(NCIDetailsDao.class); - mockNciDetailsService = context.mock(NCIDetailsService.class); - - mockJobMailSender = context.mock(JobMailSender.class); - mockVGLJobStatusAndLogReader = context.mock(VGLJobStatusAndLogReader.class); - - mockUserService = context.mock(PortalUserService.class); - - mockAnvglProvenanceService = context.mock(ANVGLProvenanceService.class); - mockScmEntryService = context.mock(ScmEntryService.class); - - vglJobStatusChangeHandler = new VGLJobStatusChangeHandler(mockJobManager,mockJobMailSender,mockVGLJobStatusAndLogReader, mockAnvglProvenanceService); - mockCloudSubmissionService = context.mock(CloudSubmissionService.class); - - mockJob = context.mock(VEGLJob.class); - mockSeries = context.mock(VEGLSeries.class); - - //Object Under Test - - controller = - new JobBuilderController("dummy@dummy.com", - "http://example.org/scm/toolbox/42", - mockJobManager, - mockFileStagingService, - vmSh, - vmShutdownSh, - mockCloudStorageServices, - mockCloudComputeServices, - mockUserService, - vglJobStatusChangeHandler, - mockScmEntryService, - mockAnvglProvenanceService, - mockCloudSubmissionService); - //mockNciDetailsDao); - - user = new PortalUser(); - user.setEmail("user@example.com"); - user.setId(userId); - job = new VEGLJob(); - job.setId(Integer.parseInt(jobId)); - job.setEmailAddress("user@example.com"); - job.setUser("user@example.com"); - context.checking(new Expectations() {{ - allowing(mockJob).getId();will(returnValue(job.getUser())); - allowing(mockJob).getUser();will(returnValue(job.getEmailAddress())); - 
allowing(mockJob).getEmailAddress();will(returnValue(job.getEmailAddress())); - allowing(mockPortalUser).getId();will(returnValue(userId)); - allowing(mockPortalUser).getEmail();will(returnValue(user.getEmail())); - allowing(mockUserService).getLoggedInUser();will(returnValue(mockPortalUser)); - allowing(mockSeries).getUser();will(returnValue(job.getEmailAddress())); - allowing(mockSeries).getId();will(returnValue(Integer.parseInt(seriesId))); - }}); - } - - - /** - * Tests that retrieving job object succeeds. - * @throws PortalServiceException - */ - @Test - public void testGetJobObject() throws PortalServiceException { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(returnValue(job)); - }}); - - ModelAndView mav = controller.getJobObject(jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNotNull(mav.getModel().get("data")); - } - - /** - * Tests that retrieving job object fails when the - * underlying job manager's job query service fails. - * @throws PortalServiceException - */ - @Test - public void testGetJobObject_Exception() throws PortalServiceException { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.getJobObject(jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that the retrieving of a list of job file object - * succeeds. - * @throws Exception - */ - @Test - public void testListStagedJobFiles() throws Exception { - final File mockFile1 = new File("MockFile1"); - final File mockFile2 = new File("MockFile2"); - final StagedFile[] mockStageFiles = new StagedFile[] { - new StagedFile(job, "mockFile1", mockFile1), - new StagedFile(job, "mockFile2", mockFile2) }; - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(returnValue(job)); - - //We should have a call to file staging service to get our files - oneOf(mockFileStagingService).listStageInDirectoryFiles(job); - will(returnValue(mockStageFiles)); - }}); - - ModelAndView mav = controller.stagedJobFiles(jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNotNull(mav.getModel().get("data")); - } - - /** - * Tests that the retrieving of job files fails - * when the job cannot be found. - * @throws PortalServiceException - */ - @Test - public void testListStagedJobFiles_JobNotFound() throws PortalServiceException { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.stagedJobFiles(jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that the retrieving of job files fails - * when the underlying file staging service fails. 
- * @throws Exception - */ - @Test - public void testListStagedJobFiles_Exception() throws Exception { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(returnValue(job)); - //We should have a call to file staging service to get our files - oneOf(mockFileStagingService).listStageInDirectoryFiles(job); - will(throwException(new PortalServiceException("test exception","test exception"))); - }}); - - ModelAndView mav = controller.stagedJobFiles(jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that downloading a job file succeeds and delegates to - * the underlying file staging service's file download handler. - * @throws Exception - */ - @Test - public void testDownloadFile() throws Exception { - final String filename = "test.py"; - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(with(equal(job.getId())), with(same(mockPortalUser))); - will(returnValue(job)); - //We should have a call to file staging service to download a file - oneOf(mockFileStagingService).handleFileDownload(job, filename, mockResponse); - }}); - - ModelAndView mav = controller.downloadFile(mockRequest, mockResponse, jobId, filename); - Assert.assertNull(mav); - } - - /** - * Tests that the deleting of given job files succeeds. - * @throws PortalServiceException - */ - @Test - public void testDeleteFiles() throws PortalServiceException { - final String file1 = "file1.txt"; - final String file2 = "file2.txt"; - final String[] filenames = new String[] { file1, file2 }; - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(with(equal(job.getId())), with(same(mockPortalUser))); - will(returnValue(job)); - //We should have calls to file staging service to delete files in staging dir - oneOf(mockFileStagingService).deleteStageInFile(job, file1); - will(returnValue(true)); - oneOf(mockFileStagingService).deleteStageInFile(job, file2); - will(returnValue(true)); - }}); - - ModelAndView mav = controller.deleteFiles(jobId, filenames); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that the deleting of given job files fails - * when the job cannot be found. - * @throws PortalServiceException - */ - @Test - public void testDeleteFiles_JobNotFoundException() throws PortalServiceException { - final String file1 = "file1.txt"; - final String file2 = "file2.txt"; - final String[] filenames = new String[] { file1, file2 }; - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.deleteFiles(jobId, filenames); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that the deleting of a given job's download objects succeeds.
- * @throws PortalServiceException - */ - @Test - public void testDeleteDownloads() throws PortalServiceException { - int downloadId1 = 13579; - int downloadId2 = 23480; - final Integer[] downloadIds = new Integer[] { downloadId1, downloadId2 }; - VglDownload vglDownload1 = new VglDownload(downloadId1); - VglDownload vglDownload2 = new VglDownload(downloadId2); - VglDownload[] vglDownloads = new VglDownload[] { vglDownload1, vglDownload2 }; - final List downloadList = new ArrayList(Arrays.asList(vglDownloads)); - job.setJobDownloads(downloadList); - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser); - will(returnValue(job)); - //We should have a call to our job manager to save our job object - oneOf(mockJobManager).saveJob(job); - }}); - - ModelAndView mav = controller.deleteDownloads(jobId, downloadIds); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that the deleting of a given job's download objects fails - * when job saving fails. - * @throws PortalServiceException - */ - @Test - public void testDeleteDownloads_SaveJobException() throws PortalServiceException { - int downloadId1 = 13579; - int downloadId2 = 23480; - final Integer[] downloadIds = new Integer[] { downloadId1, downloadId2 }; - VglDownload vglDownload1 = new VglDownload(downloadId1); - VglDownload vglDownload2 = new VglDownload(downloadId2); - VglDownload[] vglDownloads = new VglDownload[] { vglDownload1, vglDownload2 }; - final List downloadList = new ArrayList(Arrays.asList(vglDownloads)); - job.setJobDownloads(downloadList); - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser); - will(returnValue(job)); - //We should have a call to our job manager to save our job object - oneOf(mockJobManager).saveJob(job); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.deleteDownloads(jobId, downloadIds); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that the deleting of a given job's download objects fails - * when the job cannot be found. - * @throws PortalServiceException - */ - @Test - public void testDeleteDownloads_JobNotFoundException() throws PortalServiceException { - int downloadId1 = 13579; - int downloadId2 = 23480; - final Integer[] downloadIds = new Integer[] { downloadId1, downloadId2 }; - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.deleteDownloads(jobId, downloadIds); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that file uploading for a given job succeeds. 
- * @throws Exception - */ - @Test - public void testUploadFile() throws Exception { - - final MultipartHttpServletRequest mockMultipartRequest = context.mock(MultipartHttpServletRequest.class); - final File mockFile = new File(""); - final StagedFile mockStagedFile = new StagedFile(job, "mockFile", mockFile); - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser); - will(returnValue(job)); - - //We should have a call to file staging service to update a file - oneOf(mockFileStagingService).handleFileUpload(job, mockMultipartRequest); - will(returnValue(mockStagedFile)); - }}); - - ModelAndView mav = controller.uploadFile(mockMultipartRequest, mockResponse, job.getId().toString()); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that file uploading for a given job fails - * when the underlying file staging file upload handler fails. - * @throws Exception - */ - @Test - public void testUploadFile_FileUploadException() throws Exception { - - final MultipartHttpServletRequest mockMultipartRequest = context.mock(MultipartHttpServletRequest.class); - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser); - will(returnValue(job)); - - //We should have a call to file staging service to update a file - oneOf(mockFileStagingService).handleFileUpload(job, mockMultipartRequest); - will(throwException(new PortalServiceException("Test Exception","Test Exception"))); - }}); - - ModelAndView mav = controller.uploadFile(mockMultipartRequest, mockResponse, job.getId().toString()); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that file uploading for a given job fails - * when the given job cannot be found. - * @throws Exception - */ - @Test - public void testUploadFile_JobNotFoundException() throws Exception { - - final MultipartHttpServletRequest mockMultipartRequest = context.mock(MultipartHttpServletRequest.class); - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.uploadFile(mockMultipartRequest, mockResponse, job.getId().toString()); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that retrieving of a given job's download objects succeeds. 
- * @throws PortalServiceException - */ - @Test - public void testGetJobDownloads() throws PortalServiceException { - final VglDownload[] existingDownloads = new VglDownload[] { new VglDownload(12356) }; - final List downloadList = new ArrayList(Arrays.asList(existingDownloads)); - - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(returnValue(mockJob)); - //We should have a call to job object to get a list of download objects - oneOf(mockJob).getJobDownloads(); - will(returnValue(downloadList)); - }}); - - ModelAndView mav = controller.getJobDownloads(Integer.parseInt(jobId)); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNotNull(mav.getModel().get("data")); - } - - /** - * Tests that retrieving of a given job's download objects fails - * when the given job cannot be found. - * @throws PortalServiceException - */ - @Test - public void testGetJobDownloads_Exception() throws PortalServiceException { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.getJobDownloads(Integer.parseInt(jobId)); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that the getting of job status succeeds. - * @throws PortalServiceException - */ - @Test - public void testGetJobStatus() throws PortalServiceException { - final String expectedStatus = "Pending"; - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(returnValue(mockJob)); - - oneOf(mockJob).getStatus(); - will(returnValue(expectedStatus)); - - allowing(mockJob).getEmailAddress(); - will(returnValue(job.getEmailAddress())); - }}); - - ModelAndView mav = controller.getJobStatus(jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - String jobStatus = (String)mav.getModel().get("data"); - Assert.assertEquals(expectedStatus, jobStatus); - } - - /** - * Tests that the retrieving of job status fails - * when the job cannot be found. - * @throws PortalServiceException - */ - @Test - public void testGetJobStatus_JobNotFound() throws PortalServiceException { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.getJobStatus(jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that cancelling submission succeeds. 
- * @throws Exception - */ - @Test - public void testCancelSubmission() throws Exception { - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(returnValue(job)); - //We should have a call to file staging service to get our files - oneOf(mockFileStagingService).deleteStageInDirectory(job); - will(returnValue(true)); - }}); - - ModelAndView mav = controller.cancelSubmission(jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that cancelling job submission fails when the - * job cannot be found. - * @throws Exception - */ - @Test - public void testCancelSubmission_Exception() throws Exception { - - context.checking(new Expectations() {{ - //We should have a call to our job manager to get our job object - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser); - will(throwException(new Exception())); - }}); - - ModelAndView mav = controller.cancelSubmission(jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that job submission correctly interacts with all dependencies - * @throws Exception - */ - @Test - public void testJobSubmission() throws Exception { - //Instantiate our job object - - final File file1 = new File("MockFile1"); - final File file2 = new File("MockFile2"); - final StagedFile[] stageInFiles = new StagedFile[] {new StagedFile(job, "mockFile1", file1), new StagedFile(job, "mockFile2", file2)}; - final String computeVmId = "compute-vmi-id"; - final String computeServiceId = "compute-service-id"; - final String instanceId = "new-instance-id"; - final String computeKeyName = "key-name"; - final Sequence jobFileSequence = context.sequence("jobFileSequence"); //this makes sure we aren't deleting directories before uploading (and other nonsense) - final OutputStream outputStream = new ByteArrayOutputStream(); - final String jobInSavedState = JobBuilderController.STATUS_UNSUBMITTED; - final VglMachineImage[] mockImages = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - final String storageBucket = "storage-bucket"; - final String storageAccess = "213-asd-54"; - final String storageSecret = "tops3cret"; - final String storageServiceId = "storageid"; - final String storageEndpoint = "http://example.org"; - final String storageProvider = "provider"; - final String storageAuthVersion = "1.2.3"; - final String regionName = null; - - final String mockUser = "jo@me.com"; - final File activityFile = File.createTempFile("activity", ".ttl"); - final String activityFileName = "activity.ttl"; - final CloudFileInformation cloudFileInformation = new CloudFileInformation("one", 0, ""); - CloudFileInformation cloudFileModel = new CloudFileInformation("two", 0, ""); - final CloudFileInformation[] cloudList = {cloudFileInformation, cloudFileModel}; - - final Solution mockSolution = context.mock(Solution.class); - final Set solutions = new HashSet(); - solutions.add(mockSolution); - - job.setComputeVmId(computeVmId); - job.setStatus(jobInSavedState); // by default, the job is in SAVED state - job.setStorageBaseKey("base/key"); - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId(storageServiceId); - job.setStorageBucket(storageBucket); - - context.checking(new Expectations() {{ - 
oneOf(mockScmEntryService).getJobSolutions(job);will(returnValue(solutions)); - oneOf(mockSolution).getUri();will(returnValue("http://sssc.vhirl.org/solution1")); - oneOf(mockSolution).getDescription();will(returnValue("A Fake Solution")); - oneOf(mockSolution).getName();will(returnValue("FakeSol")); - oneOf(mockSolution).getCreatedAt();will(returnValue(new Date())); - - //We should have access control check to ensure user has permission to run the job - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(mockImages)); - oneOf(mockImages[0]).getImageId();will(returnValue("compute-vmi-id")); - oneOf(mockImages[0]).getPermissions();will(returnValue(new String[] {"testRole2"})); - allowing(mockRequest).isUserInRole("testRole2");will(returnValue(true)); - - //We should have 1 call to our job manager to get our job object and 1 call to save it - //oneOf(mockJobManager).getJobById(job.getId(), user);will(returnValue(job)); - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser);will(returnValue(job)); - oneOf(mockJobManager).saveJob(job); - - oneOf(mockFileStagingService).writeFile(job, JobBuilderController.DOWNLOAD_SCRIPT); - will(returnValue(outputStream)); - - //We should have 1 call to get our stage in files - oneOf(mockFileStagingService).listStageInDirectoryFiles(job);will(returnValue(stageInFiles)); - inSequence(jobFileSequence); - - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getAccessKey();will(returnValue(storageAccess)); - allowing(mockCloudStorageServices[0]).getSecretKey();will(returnValue(storageSecret)); - allowing(mockCloudStorageServices[0]).getProvider();will(returnValue(storageProvider)); - allowing(mockCloudStorageServices[0]).getProvider();will(returnValue(storageProvider)); - allowing(mockCloudStorageServices[0]).getAuthVersion();will(returnValue(storageAuthVersion)); - allowing(mockCloudStorageServices[0]).getEndpoint();will(returnValue(storageEndpoint)); - allowing(mockCloudStorageServices[0]).getProvider();will(returnValue(storageProvider)); - allowing(mockCloudStorageServices[0]).getAuthVersion();will(returnValue(storageAuthVersion)); - allowing(mockCloudStorageServices[0]).getRegionName();will(returnValue(regionName)); - allowing(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.Permissable)); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - - //We should have 1 call to upload them - oneOf(mockCloudStorageServices[0]).uploadJobFiles(with(equal(job)), with(equal(new File[] {file1, file2}))); - inSequence(jobFileSequence); - - //And finally 1 call to execute the job - oneOf(mockCloudComputeServices[0]).executeJob(with(any(VEGLJob.class)), with(any(String.class)));will(returnValue(instanceId)); - - oneOf(mockJobManager).saveJob(job); - - //We should have 1 call to our job manager to create a job audit trail record - oneOf(mockJobManager).createJobAuditTrail(with(JobBuilderController.STATUS_UNSUBMITTED), with(job), with(any(String.class))); - oneOf(mockCloudSubmissionService).queueSubmission(with(mockCloudComputeServices[0]), with(job), with(any(String.class))); - - oneOf(mockRequest).getRequestURL();will(returnValue(new StringBuffer("http://mock.fake/secure/something"))); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(equal(job)));will(returnValue(cloudList)); - allowing(mockFileStagingService).createLocalFile(activityFileName, job);will(returnValue(activityFile)); - 
allowing(mockCloudStorageServices[0]).uploadJobFiles(with(any(VEGLJob.class)), with(any(File[].class))); - - oneOf(mockPortalUser).getUsername();will(returnValue(mockUser)); - allowing(mockPortalUser).getEmail();will(returnValue(user.getEmail())); - oneOf(mockPortalUser).getAwsKeyName();will(returnValue(computeKeyName)); - allowing(mockPortalUser).getId();will(returnValue(mockUser)); - oneOf(mockAnvglProvenanceService).createActivity(job, solutions, mockPortalUser);will(returnValue("")); - - allowing(mockAnvglProvenanceService).setServerURL("http://mock.fake/secure/something"); - }}); - - ModelAndView mav = controller.submitJob(mockRequest, mockResponse, job.getId().toString()); - - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that job submission fails correctly when user doesn't have permission to use - * the VMI. - */ - @Test - public void testJobSubmission_PermissionDenied() throws Exception { - //Instantiate our job object - - final String computeServiceId = "ccsid"; - final String injectedComputeVmId = "injected-compute-vmi-id"; - final File mockFile1 = new File("MockFile1"); - final File mockFile2 = new File("MockFile2"); - final StagedFile[] stageInFiles = new StagedFile[] {new StagedFile(job, "mockFile1", mockFile1), new StagedFile(job, "mockFile2", mockFile2)}; - final String jobInSavedState = JobBuilderController.STATUS_UNSUBMITTED; - final OutputStream mockOutputStream = new ByteArrayOutputStream(); - final VglMachineImage[] mockImages = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - final String storageServiceId = "cssid"; - - job.setComputeVmId(injectedComputeVmId); - job.setStatus(jobInSavedState); // by default, the job is in SAVED state - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId(storageServiceId); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - //oneOf(mockJobManager).getJobById(job.getId(), user);will(returnValue(job)); - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser);will(returnValue(job)); - oneOf(mockJobManager).saveJob(job); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - - //We should have access control check to ensure user has permission to run the job - oneOf(mockRequest).getSession();will(returnValue(mockSession)); - allowing(mockRequest).isUserInRole("a-different-role");will(returnValue(false)); - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(mockImages)); - oneOf(mockImages[0]).getImageId();will(returnValue("compute-vmi-id")); - oneOf(mockImages[0]).getPermissions();will(returnValue(new String[] {"a-different-role"})); - - oneOf(mockFileStagingService).writeFile(job, JobBuilderController.DOWNLOAD_SCRIPT); - will(returnValue(mockOutputStream)); - - //We should have 1 call to get our stage in files - oneOf(mockFileStagingService).listStageInDirectoryFiles(job);will(returnValue(stageInFiles)); - - //And one call to upload them (which we will mock as failing) - oneOf(mockCloudStorageServices[0]).uploadJobFiles(with(equal(job)), with(any(File[].class)));will(throwException(new PortalServiceException(""))); - - //We should have 1 call to our job manager to create a job audit trail record - //oneOf(mockJobManager).createJobAuditTrail(jobInSavedState, job, errorDescription); - oneOf(mockJobManager).createJobAuditTrail(jobInSavedState, job, 
""); - }}); - - ModelAndView mav = controller.submitJob(mockRequest, mockResponse, job.getId().toString()); - - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertEquals(JobBuilderController.STATUS_UNSUBMITTED, job.getStatus()); - } - - /** - * Tests that job submission fails correctly when the job doesn't exist - * @throws Exception - */ - @Test - public void testJobSubmission_JobDNE() throws Exception { - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(null)); - }}); - - ModelAndView mav = controller.submitJob(mockRequest, mockResponse, jobId); - - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that job submission fails correctly when files cannot be uploaded to S3 - * @throws Exception - */ - @Test - public void testJobSubmission_S3Failure() throws Exception { - //Instantiate our job object - final String computeVmId = "compute-vmi-id"; - final File file1 = new File("MockFile1"); - final File file2 = new File("MockFile2"); - final StagedFile[] stageInFiles = new StagedFile[] {new StagedFile(job, "mockFile1", file1), new StagedFile(job, "mockFile2", file2)}; - final String jobInSavedState = JobBuilderController.STATUS_UNSUBMITTED; - final ByteArrayOutputStream bos = new ByteArrayOutputStream(4096); - final VglMachineImage[] mockImages = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - final String computeServiceId = "id-1"; - final String storageServiceId = "id-2"; - job.setComputeVmId(computeVmId); - job.setStatus(jobInSavedState); // by default, the job is in SAVED state - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId(storageServiceId); - job.setJobDownloads(new ArrayList()); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object - //oneOf(mockJobManager).getJobById(job.getId(), user);will(returnValue(job)); - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser);will(returnValue(job)); - - //We should have access control check to ensure user has permission to run the job - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(mockImages)); - oneOf(mockImages[0]).getImageId();will(returnValue("compute-vmi-id")); - oneOf(mockImages[0]).getPermissions();will(returnValue(new String[] {"testRole1"})); - allowing(mockRequest).isUserInRole("testRole1");will(returnValue(true)); - - oneOf(mockFileStagingService).writeFile(job, JobBuilderController.DOWNLOAD_SCRIPT); - will(returnValue(bos)); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - - //We should have 1 call to get our stage in files - oneOf(mockFileStagingService).listStageInDirectoryFiles(job);will(returnValue(stageInFiles)); - - //And one call to upload them (which we will mock as failing) - oneOf(mockCloudStorageServices[0]).uploadJobFiles(with(equal(job)), with(any(File[].class)));will(throwException(new PortalServiceException(""))); - - //We should have 1 call to our job manager to create a job audit trail record - oneOf(mockJobManager).createJobAuditTrail(jobInSavedState, job, ""); - }}); - - ModelAndView mav = controller.submitJob(mockRequest, mockResponse, job.getId().toString()); - - Assert.assertFalse((Boolean)mav.getModel().get("success")); - 
Assert.assertEquals(JobBuilderController.STATUS_UNSUBMITTED, job.getStatus()); - } - - /** - * Tests that job submission fails correctly when user specifies a storage service that DNE - */ - @Test - public void testJobSubmission_StorageServiceDNE() throws Exception { - //Instantiate our job object - final String computeServiceId = "ccsid"; - final String injectedComputeVmId = "injected-compute-vmi-id"; - final String jobInSavedState = JobBuilderController.STATUS_UNSUBMITTED; - final VglMachineImage[] mockImages = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - final HashMap sessionVariables = new HashMap(); - final String storageServiceId = "cssid"; - - sessionVariables.put("user-roles", new String[] {"testRole1", "testRole2"}); - job.setComputeVmId(injectedComputeVmId); - job.setStatus(jobInSavedState); // by default, the job is in SAVED state - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId("some-invalid-id"); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - //oneOf(mockJobManager).getJobById(job.getId(), user);will(returnValue(job)); - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser);will(returnValue(job)); - oneOf(mockJobManager).saveJob(job); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - - //We should have access control check to ensure user has permission to run the job - oneOf(mockRequest).getSession();will(returnValue(mockSession)); - oneOf(mockSession).getAttribute("user-roles");will(returnValue(sessionVariables.get("user-roles"))); - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(mockImages)); - oneOf(mockImages[0]).getImageId();will(returnValue("compute-vmi-id")); - }}); - - ModelAndView mav = controller.submitJob(mockRequest, mockResponse, job.getId().toString()); - - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertEquals(JobBuilderController.STATUS_UNSUBMITTED, job.getStatus()); - } - - /** - * Tests that job submission fails correctly when user specifies a compute service that DNE - */ - @Test - public void testJobSubmission_ComputeServiceDNE() throws Exception { - //Instantiate our job object - final String computeServiceId = "ccsid"; - final String injectedComputeVmId = "injected-compute-vmi-id"; - final String jobInSavedState = JobBuilderController.STATUS_UNSUBMITTED; - final VglMachineImage[] mockImages = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - final HashMap sessionVariables = new HashMap(); - final String storageServiceId = "cssid"; - - sessionVariables.put("user-roles", new String[] {"testRole1", "testRole2"}); - job.setComputeVmId(injectedComputeVmId); - job.setStatus(jobInSavedState); // by default, the job is in SAVED state - job.setComputeServiceId("some-invalid-id"); - job.setStorageServiceId(storageServiceId); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - //oneOf(mockJobManager).getJobById(job.getId(), user);will(returnValue(job)); - oneOf(mockJobManager).getJobById(job.getId(), mockPortalUser);will(returnValue(job)); - oneOf(mockJobManager).saveJob(job); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - - //We should have 
access control check to ensure user has permission to run the job - oneOf(mockRequest).getSession();will(returnValue(mockSession)); - oneOf(mockSession).getAttribute("user-roles");will(returnValue(sessionVariables.get("user-roles"))); - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(mockImages)); - oneOf(mockImages[0]).getImageId();will(returnValue("compute-vmi-id")); - }}); - - ModelAndView mav = controller.submitJob(mockRequest, mockResponse, job.getId().toString()); - - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertEquals(JobBuilderController.STATUS_UNSUBMITTED, job.getStatus()); - } - - /** - * Tests that the bootstrap resource is not too long and has unix line endings and other such - * conditions. - * @throws Exception - */ - @Test - public void testBootstrapResource() throws Exception { - //see - http://docs.amazonwebservices.com/AutoScaling/latest/APIReference/API_CreateLaunchConfiguration.html - final int maxFileSize = 21847; - final int safeFileSize = maxFileSize - 1024; //arbitrary number to account for long strings being injected into bootstrap - - String contents = ResourceUtil.loadResourceAsString("org/auscope/portal/server/web/controllers/vl-bootstrap.sh"); - - Assert.assertNotNull(contents); - Assert.assertTrue("Bootstrap is empty!", contents.length() > 0); - Assert.assertTrue("Bootstrap is too big!", contents.length() < safeFileSize); - Assert.assertFalse("Boostrap needs Unix style line endings!", contents.contains("\r")); - Assert.assertEquals("Boostrap must start with '#'", '#', contents.charAt(0)); - - //We can't use variables in the form ${name} as the {} conflict with java MessageFormat - Pattern pattern = Pattern.compile("\\{(.*?)\\}"); - Matcher matcher = pattern.matcher(contents); - while (matcher.find()) { - - if (matcher.groupCount() != 1) { - continue; - } - String name = matcher.group(1); - - try { - Integer.parseInt(name); - } catch (NumberFormatException ex) { - Assert.fail(String.format("The variable ${%1$s} conflicts with java MessageFormat variables. 
Get rid of curly braces", name)); - } - } - } - - /** - * Tests that Grid Submit Controller's usage of the bootstrap template - * @throws Exception - */ - @Test - public void testCreateBootstrapForJob() throws Exception { - final String bucket = "stora124e-Bucket"; - final String access = "213-asd-54"; - final String secret = "tops3cret"; - final String provider = "provider"; - final String storageAuthVersion = "1.2.3"; - final String computeServiceId = "ccs"; - final String storageServiceId = "css"; - final String endpoint = "http://example.org"; - final String regionName = "region-name"; - - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId(storageServiceId); - job.setStorageBucket(bucket); - - context.checking(new Expectations() {{ - //We allow calls to the Configurer which simply extract values from our property file - atLeast(1).of(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - atLeast(1).of(mockCloudStorageServices[0]).getAccessKey();will(returnValue(access)); - atLeast(1).of(mockCloudStorageServices[0]).getSecretKey();will(returnValue(secret)); - atLeast(1).of(mockCloudStorageServices[0]).getProvider();will(returnValue(provider)); - atLeast(1).of(mockCloudStorageServices[0]).getAuthVersion();will(returnValue(storageAuthVersion)); - atLeast(1).of(mockCloudStorageServices[0]).getEndpoint();will(returnValue(endpoint)); - atLeast(1).of(mockCloudStorageServices[0]).getAuthVersion();will(returnValue(storageAuthVersion)); - atLeast(1).of(mockCloudStorageServices[0]).getRegionName();will(returnValue(regionName)); - atLeast(1).of(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.Permissable)); - }}); - - job.setStorageBaseKey("test/key"); - - String contents = controller.createBootstrapForJob(job); - Assert.assertNotNull(contents); - Assert.assertTrue("Bootstrap is empty!", contents.length() > 0); - Assert.assertFalse("Boostrap needs Unix style line endings!", contents.contains("\r")); - Assert.assertTrue(contents.contains(bucket)); - Assert.assertTrue(contents.contains(access)); - Assert.assertTrue(contents.contains(job.getStorageBaseKey())); - Assert.assertTrue(contents.contains(secret)); - Assert.assertTrue(contents.contains(provider)); - Assert.assertTrue(contents.contains(endpoint)); - Assert.assertTrue(contents.contains(storageAuthVersion)); - Assert.assertTrue(contents.contains(vmSh)); - Assert.assertTrue(contents.contains(endpoint)); - Assert.assertTrue(contents.contains(regionName)); - } - - /** - * Tests that Grid Submit Controller's usage of the bootstrap template correctly encodes an empty - * string (when required) - * @throws Exception - */ - @Test - public void testCreateBootstrapForJob_NoOptionalValues() throws Exception { - final String bucket = "stora124e-Bucket"; - final String access = "213-asd-54"; - final String secret = "tops3cret"; - final String provider = "provider"; - final String storageAuthVersion = null; - final String regionName = null; - final String computeServiceId = "ccs"; - final String storageServiceId = "css"; - final String endpoint = "http://example.org"; - - job.setComputeServiceId(computeServiceId); - job.setStorageServiceId(storageServiceId); - job.setStorageBucket(bucket); - - context.checking(new Expectations() {{ - //We allow calls to the Configurer which simply extract values from our property file - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getAccessKey();will(returnValue(access)); - 
allowing(mockCloudStorageServices[0]).getSecretKey();will(returnValue(secret)); - allowing(mockCloudStorageServices[0]).getProvider();will(returnValue(provider)); - allowing(mockCloudStorageServices[0]).getAuthVersion();will(returnValue(storageAuthVersion)); - allowing(mockCloudStorageServices[0]).getEndpoint();will(returnValue(endpoint)); - allowing(mockCloudStorageServices[0]).getAuthVersion();will(returnValue(storageAuthVersion)); - allowing(mockCloudStorageServices[0]).getRegionName();will(returnValue(regionName)); - allowing(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.Permissable)); - }}); - - job.setStorageBaseKey("test/key"); - - String contents = controller.createBootstrapForJob(job); - Assert.assertNotNull(contents); - Assert.assertTrue(contents.contains("STORAGE_AUTH_VERSION=\"\"")); - Assert.assertTrue(contents.contains("OS_REGION_NAME=\"\"")); - } - - /** - * Tests that listing job images for a user works as expected - * @throws Exception - */ - @SuppressWarnings("rawtypes") - @Test - public void testListImages() throws Exception { - final String computeServiceId = "compute-service-id"; - final VglMachineImage[] images = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - - context.checking(new Expectations() {{ - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(images)); - - oneOf(images[0]).getPermissions();will(returnValue(new String[] {"testRole2"})); - allowing(mockRequest).isUserInRole("testRole2");will(returnValue(true)); - }}); - - ModelAndView mav = controller.getImagesForComputeService(mockRequest, computeServiceId, null, null); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNotNull(mav.getModel().get("data")); - Assert.assertEquals(images.length, ((Set) mav.getModel().get("data")).size()); - } - - /** - * Tests that listing job images for a user works as expected when there is an image with no restrictions - * @throws Exception - */ - @SuppressWarnings("rawtypes") - @Test - public void testListImages_NoRestrictions() throws Exception { - final HashMap sessionVariables = new HashMap(); - final String computeServiceId = "compute-service-id"; - final VglMachineImage[] images = new VglMachineImage[] {context.mock(VglMachineImage.class)}; - - sessionVariables.put("user-roles", new String[] {"testRole1", "testRole2"}); - - context.checking(new Expectations() {{ - oneOf(mockRequest).getSession();will(returnValue(mockSession)); - oneOf(mockSession).getAttribute("user-roles");will(returnValue(sessionVariables.get("user-roles"))); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - oneOf(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(images)); - - oneOf(images[0]).getPermissions();will(returnValue(null)); - }}); - - ModelAndView mav = controller.getImagesForComputeService(mockRequest, computeServiceId, null, null); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertNotNull(mav.getModel().get("data")); - Assert.assertEquals(images.length, ((Set) mav.getModel().get("data")).size()); - } - - /** - * Tests the creation of a new job object. 
- * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void testUpdateOrCreateJob_CreateJobObject() throws Exception { - final HashMap sessionVariables = new HashMap(); - final String baseKey = "base/key"; - final String storageServiceId = "nectar-openstack-storage-melb"; - final String computeServiceId = "compute-service"; - final String computeVmType = "compute-vm-type"; - final String computeVmId = "compute-vm"; - final String name = "name"; - final String description = "desc"; - final Integer seriesId = 5431; - final boolean emailNotification = true; - - sessionVariables.put("doubleValue", 123.45); - sessionVariables.put("intValue", 123); - sessionVariables.put("notExtracted", new Object()); //this should NOT be requested - - context.checking(new Expectations() {{ - //A whole bunch of parameters will be setup based on what session variables are set - oneOf(mockRequest).getSession();will(returnValue(mockSession)); - - oneOf(mockSession).getAttributeNames();will(returnValue(new IteratorEnumeration(sessionVariables.keySet().iterator()))); - allowing(mockSession).getAttribute("doubleValue");will(returnValue(sessionVariables.get("doubleValue"))); - allowing(mockSession).getAttribute("intValue");will(returnValue(sessionVariables.get("intValue")));; - allowing(mockSession).getAttribute("notExtracted");will(returnValue(sessionVariables.get("notExtracted"))); - allowing(mockSession).getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST);will(returnValue(null)); - allowing(mockSession).setAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST, null); - - allowing(mockPortalUser).getEmail();will(returnValue("email@example.org")); - allowing(mockPortalUser).getArnExecution(); will(returnValue(null)); - allowing(mockPortalUser).getArnStorage(); will(returnValue(null)); - allowing(mockPortalUser).getAwsSecret(); will(returnValue(null)); - allowing(mockPortalUser).getAwsKeyName(); will(returnValue(null)); - allowing(mockPortalUser).getS3Bucket(); will(returnValue(null)); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - - oneOf(mockCloudStorageServices[0]).generateBaseKey(with(any(VEGLJob.class)));will(returnValue(baseKey)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.Mandatory)); - - oneOf(mockFileStagingService).generateStageInDirectory(with(any(VEGLJob.class))); - - oneOf(mockJobManager).saveJob(with(any(VEGLJob.class))); //one save job to get ID - oneOf(mockJobManager).saveJob(with(any(VEGLJob.class))); //one save to finalise initialisation - oneOf(mockJobManager).saveJob(with(any(VEGLJob.class))); //one save to include updates - - //We should have 1 call to our job manager to create a job audit trail record - oneOf(mockJobManager).createJobAuditTrail(with(aNull(String.class)), with(any(VEGLJob.class)), with(any(String.class))); - - oneOf(mockCloudComputeServices[0]).getKeypair(); - }}); - - ModelAndView mav = controller.updateOrCreateJob(null, //The integer ID if not specified will trigger job creation - name, - description, - seriesId, - computeServiceId, - computeVmId, - "run-command", - computeVmType, - null, - null, - null, - null, - emailNotification, - null, - null, - mockRequest); - - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - List data = (List)mav.getModel().get("data"); - Assert.assertNotNull(data); - Assert.assertEquals(1, data.size()); - - 
VEGLJob newJob = data.get(0); - Assert.assertNotNull(newJob); - Assert.assertEquals(storageServiceId, newJob.getStorageServiceId()); - Assert.assertEquals(computeServiceId, newJob.getComputeServiceId()); - Assert.assertEquals(baseKey, newJob.getStorageBaseKey()); - Assert.assertEquals(computeVmType, newJob.getComputeInstanceType()); - - Map params = newJob.getJobParameters(); - Assert.assertNotNull(params); - Assert.assertEquals(2, params.size()); - - String paramToTest = "doubleValue"; - VglParameter param = params.get(paramToTest); - Assert.assertNotNull(param); - Assert.assertEquals("number", param.getType()); - Assert.assertEquals(sessionVariables.get(paramToTest).toString(), param.getValue()); - - paramToTest = "intValue"; - param = params.get(paramToTest); - Assert.assertNotNull(param); - Assert.assertEquals("number", param.getType()); - Assert.assertEquals(sessionVariables.get(paramToTest).toString(), param.getValue()); - } - - /** - * Tests that the updateJob works as expected - * @throws Exception - */ - @Test - public void testUpdateOrCreateJob_UpdateJobSTSEnabled() throws Exception { - final int seriesId = 12; - final int jobId = 1234; - final String computeVmType = "compute-vm-type"; - final String newBaseKey = "base/key"; - final boolean emailNotification = true; - final String keypair = "vl-developers"; - final Integer walltime = Integer.valueOf(0); - final List annotations = new ArrayList(); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - - //We should have the following fields updated - oneOf(mockJob).setSeriesId(seriesId); - oneOf(mockJob).setName("name"); - oneOf(mockJob).setDescription("description"); - oneOf(mockJob).setComputeVmId("computeVmId"); - oneOf(mockJob).setComputeVmRunCommand("run-command"); - oneOf(mockJob).setComputeServiceId("computeServiceId"); - oneOf(mockJob).setStorageServiceId("nectar-openstack-storage-melb"); - oneOf(mockJob).setStorageBaseKey(newBaseKey); - oneOf(mockJob).setEmailNotification(emailNotification); - oneOf(mockJob).setComputeInstanceType(computeVmType); - oneOf(mockJob).setWalltime(walltime); - oneOf(mockJob).setAnnotations(annotations); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue("computeServiceId")); - allowing(mockCloudStorageServices[0]).getId();will(returnValue("nectar-openstack-storage-melb")); - allowing(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.Permissable)); - - oneOf(mockCloudStorageServices[0]).generateBaseKey(mockJob);will(returnValue(newBaseKey)); - //We should have 1 call to save our job - oneOf(mockJobManager).saveJob(mockJob); - - oneOf(mockCloudComputeServices[0]).getKeypair();will(returnValue(keypair)); - oneOf(mockJob).setComputeInstanceKey(keypair); - - oneOf(mockPortalUser).getArnStorage();will(returnValue("aws:arn")); - allowing(mockPortalUser).getS3Bucket();will(returnValue("userbucket")); - oneOf(mockJob).setStorageBucket("userbucket"); - }}); - - ModelAndView mav = controller.updateOrCreateJob(jobId, - "name", - "description", - seriesId, - "computeServiceId", - "computeVmId", - "run-command", - computeVmType, - null, - null, - null, - "registeredUrl", - emailNotification, - Integer.valueOf(walltime), - null, - mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - } - - /** - * Tests that the updateJob works as expected - * 
@throws Exception - */ - @Test - public void testUpdateOrCreateJob_UpdateJobSTSDisabled() throws Exception { - final int seriesId = 12; - final int jobId = 1234; - final String computeVmType = "compute-vm-type"; - final String newBaseKey = "base/key"; - final boolean emailNotification = true; - final String keypair = "vl-developers"; - final Integer walltime = Integer.valueOf(0); - final List annotations = new ArrayList(); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - - //We should have the following fields updated - oneOf(mockJob).setSeriesId(seriesId); - oneOf(mockJob).setName("name"); - oneOf(mockJob).setDescription("description"); - oneOf(mockJob).setComputeVmId("computeVmId"); - oneOf(mockJob).setComputeVmRunCommand("run-command"); - oneOf(mockJob).setComputeServiceId("computeServiceId"); - oneOf(mockJob).setStorageServiceId("nectar-openstack-storage-melb"); - oneOf(mockJob).setStorageBaseKey(newBaseKey); - oneOf(mockJob).setEmailNotification(emailNotification); - oneOf(mockJob).setComputeInstanceType(computeVmType); - oneOf(mockJob).setWalltime(walltime); - oneOf(mockJob).setAnnotations(annotations); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue("computeServiceId")); - allowing(mockCloudStorageServices[0]).getId();will(returnValue("nectar-openstack-storage-melb")); - allowing(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.Permissable)); - allowing(mockCloudStorageServices[0]).getBucket();will(returnValue("storagebucket")); - - oneOf(mockCloudStorageServices[0]).generateBaseKey(mockJob);will(returnValue(newBaseKey)); - //We should have 1 call to save our job - oneOf(mockJobManager).saveJob(mockJob); - - oneOf(mockCloudComputeServices[0]).getKeypair();will(returnValue(keypair)); - oneOf(mockJob).setComputeInstanceKey(keypair); - - oneOf(mockPortalUser).getArnStorage();will(returnValue(null)); - oneOf(mockJob).setStorageBucket("storagebucket"); - }}); - - ModelAndView mav = controller.updateOrCreateJob(jobId, - "name", - "description", - seriesId, - "computeServiceId", - "computeVmId", - "run-command", - computeVmType, - null, - null, - null, - "registeredUrl", - emailNotification, - Integer.valueOf(walltime), - null, - mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - } - - /** - * Tests that the updateJob works as expected - * @throws Exception - */ - @Test - public void testUpdateJobSeries() throws Exception { - final String folderName = "Name"; - final ArrayList series=new ArrayList(); - series.add(mockSeries); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(user.getEmail())); - allowing(mockPortalUser).getId();will(returnValue(userId)); - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(mockJob)); - oneOf(mockJobManager).querySeries(user.getEmail(),folderName, null);will(returnValue(series)); - oneOf(mockJob).setSeriesId(Integer.parseInt(seriesId)); - allowing(mockJob).getEmailAddress();will(returnValue(job.getEmailAddress())); - allowing(mockJob).getUser();will(returnValue(userId)); - oneOf(mockJobManager).saveJob(mockJob); - }}); - - ModelAndView mav = controller.updateJobSeries(Integer.parseInt(jobId),folderName, mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean) 
mav.getModel().get("success")); - } - - /** - * Tests that the updateJob works as expected - * @throws Exception - */ - @Test - public void testUpdateJobSeriesError() throws Exception { - final String folderName = "Name"; - final ArrayList series=new ArrayList(); - - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(user.getEmail())); - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), new PortalUser());will(returnValue(mockJob)); - oneOf(mockJobManager).querySeries(user.getEmail(),folderName, null);will(returnValue(series)); - - }}); - - ModelAndView mav = controller.updateJobSeries(Integer.parseInt(jobId),folderName, mockRequest); - Assert.assertNotNull(mav); - Assert.assertFalse((Boolean) mav.getModel().get("success")); - } - - @Test - public void testUpdateOrCreateJob_SaveFailure() throws Exception { - final boolean emailNotification = true; - final String computeVmType = "compute-vm-type"; - final Integer walltime = Integer.valueOf(0); - final List annotations = new ArrayList(); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(mockJob)); - - //We should have the following fields updated - oneOf(mockJob).setSeriesId(Integer.parseInt(seriesId)); - oneOf(mockJob).setName("name"); - oneOf(mockJob).setDescription("description"); - oneOf(mockJob).setComputeVmId("computeVmId"); - oneOf(mockJob).setComputeVmRunCommand("run-command"); - oneOf(mockJob).setComputeServiceId("computeServiceId"); - oneOf(mockJob).setStorageServiceId("nectar-openstack-storage-melb"); - oneOf(mockJob).setEmailNotification(emailNotification); - oneOf(mockJob).setComputeInstanceType(computeVmType); - oneOf(mockJob).setStorageBucket("bucket"); - oneOf(mockJob).setWalltime(walltime); - oneOf(mockJob).setAnnotations(annotations); - oneOf(mockJob).setStorageBaseKey("base/key"); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue("computeServiceId")); - allowing(mockCloudStorageServices[0]).getId();will(returnValue("nectar-openstack-storage-melb")); - allowing(mockCloudStorageServices[0]).getStsRequirement();will(returnValue(STSRequirement.ForceNone)); - allowing(mockCloudStorageServices[0]).generateBaseKey(mockJob);will(returnValue("base/key")); - allowing(mockCloudStorageServices[0]).getBucket();will(returnValue("bucket")); - - //We should have 1 call to save our job but will throw Exception - oneOf(mockJobManager).saveJob(mockJob);will(throwException(new Exception(""))); - }}); - - ModelAndView mav = controller.updateOrCreateJob(Integer.parseInt(jobId), - "name", - "description", - Integer.parseInt(seriesId), - "computeServiceId", - "computeVmId", - "run-command", - computeVmType, - null, - null, - null, - "registeredUrl", - emailNotification, - walltime, - null, - mockRequest); - Assert.assertNotNull(mav); - Assert.assertFalse((Boolean) mav.getModel().get("success")); - } - - /** - * Tests that the updateJob fails as expected with a bad compute id - * @throws Exception - */ - @Test - public void testUpdateOrCreateJob_UpdateJobWithBadComputeId() throws Exception { - final boolean emailNotification = true; - final String computeVmType = "compute-vm-type"; - final Integer walltime = Integer.valueOf(0); - final List annotations = new ArrayList(); - - context.checking(new Expectations() {{ - //We should have 1 call to our job manager to get our job object and 1 call to save it - 
oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(mockJob)); - - //We should have the following fields updated - oneOf(mockJob).setSeriesId(Integer.parseInt(seriesId)); - oneOf(mockJob).setName("name"); - oneOf(mockJob).setDescription("description"); - oneOf(mockJob).setComputeVmId("computeVmId"); - oneOf(mockJob).setComputeVmRunCommand(null); - oneOf(mockJob).setComputeServiceId("computeServiceId"); - oneOf(mockJob).setStorageServiceId("nectar-openstack-storage-melb"); - oneOf(mockJob).setEmailNotification(emailNotification); - oneOf(mockJob).setComputeInstanceType(computeVmType); - oneOf(mockJob).setWalltime(walltime); - oneOf(mockJob).setAnnotations(annotations); - - allowing(mockCloudComputeServices[0]).getId();will(returnValue("computeServiceId-thatDNE")); - allowing(mockCloudStorageServices[0]).getId();will(returnValue("computeStorageId")); - - //We should have 1 call to save our job - oneOf(mockJobManager).saveJob(mockJob); - }}); - - ModelAndView mav = controller.updateOrCreateJob(Integer.parseInt(jobId), - "name", - "description", - Integer.parseInt(seriesId), - "computeServiceId", - "computeVmId", - null, - computeVmType, - null, - null, - null, - "registeredUrl", - emailNotification, - walltime, - null, - mockRequest); - Assert.assertNotNull(mav); - Assert.assertFalse((Boolean) mav.getModel().get("success")); - } - - /** - * Tests that the updateJobDownloads works as expected when appending - * @throws Exception - */ - @Test - public void testUpdateJobDownloads_Append() throws Exception { - final String append = "true"; - final String[] names = new String[] {"n1", "n2"}; - final String[] descriptions = new String[] {"d1", "d2"}; - final String[] urls = new String[] {"http://example.org/1", "http://example.org/2"}; - final String[] localPaths = new String[] {"p1", "p2"}; - final Double[] northBoundLatitudes = new Double[] {-20.123, -21.456}; - final Double[] eastBoundLongitudes = new Double[] {120.123, 121.456}; - final Double[] southBoundLatitudes = new Double[] {-30.789, -31.0}; - final Double[] westBoundLongitudes = new Double[] {130.789, 141.0}; - final VglDownload[] existingDownloads = new VglDownload[] {new VglDownload(12356)}; - - job.setJobDownloads(new ArrayList(Arrays.asList(existingDownloads))); - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(job)); - - oneOf(mockJobManager).saveJob(job); - }}); - - ModelAndView mav = controller.updateJobDownloads(Integer.parseInt(jobId), append, names, descriptions, urls, localPaths, - northBoundLatitudes, eastBoundLongitudes, southBoundLatitudes, westBoundLongitudes); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - //The resulting job should have 3 elements in its list (due to appending) - List dls = job.getJobDownloads(); - Assert.assertEquals(existingDownloads.length + names.length, dls.size()); - Assert.assertEquals(existingDownloads[0], dls.get(0)); - for (int i = 0; i < names.length; i++) { - VglDownload dlToTest = dls.get(existingDownloads.length + i); - Assert.assertEquals(names[i], dlToTest.getName()); - Assert.assertEquals(descriptions[i], dlToTest.getDescription()); - Assert.assertEquals(urls[i], dlToTest.getUrl()); - Assert.assertEquals(localPaths[i], dlToTest.getLocalPath()); - } - } - - /** - * Tests that the updateJobDownloads works as expected when replacing - * @throws Exception - */ - @Test - public void testUpdateJobDownloads_Replace() throws 
Exception { - final String append = "false"; - final String[] names = new String[] {"n1", "n2"}; - final String[] descriptions = new String[] {"d1", "d2"}; - final String[] urls = new String[] {"http://example.org/1", "http://example.org/2"}; - final String[] localPaths = new String[] {"p1", "p2"}; - final Double[] northBoundLatitudes = new Double[] {-20.123, -21.456}; - final Double[] eastBoundLongitudes = new Double[] {120.123, 121.456}; - final Double[] southBoundLatitudes = new Double[] {-30.789, -31.0}; - final Double[] westBoundLongitudes = new Double[] {130.789, 141.0}; - final VglDownload[] existingDownloads = new VglDownload[] {new VglDownload(12356)}; - - - job.setJobDownloads(new ArrayList(Arrays.asList(existingDownloads))); - - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(job)); - - oneOf(mockJobManager).saveJob(job); - }}); - - ModelAndView mav = controller.updateJobDownloads(Integer.parseInt(jobId), append, names, descriptions, urls, localPaths, - northBoundLatitudes, eastBoundLongitudes, southBoundLatitudes, westBoundLongitudes); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - //The resulting job should only have the 2 new elements in its list (due to replacing) - List dls = job.getJobDownloads(); - Assert.assertEquals(names.length, dls.size()); - for (int i = 0; i < names.length; i++) { - VglDownload dlToTest = dls.get(i); - Assert.assertEquals(names[i], dlToTest.getName()); - Assert.assertEquals(descriptions[i], dlToTest.getDescription()); - Assert.assertEquals(urls[i], dlToTest.getUrl()); - Assert.assertEquals(localPaths[i], dlToTest.getLocalPath()); - } - } - - /** - * Unit test for successful object conversion in getAllJobInputs - * @throws Exception - */ - @Test - public void testGetAllJobInputs() throws Exception { - final VglDownload dl = new VglDownload(413); - final File mockFile = new File(""); - final StagedFile[] stagedFiles = new StagedFile[]{new StagedFile(job, "another/file.ext", mockFile)}; - - dl.setDescription("desc"); - dl.setEastBoundLongitude(1.0); - dl.setWestBoundLongitude(2.0); - dl.setLocalPath("local/path/file.ext"); - dl.setName("myFile"); - dl.setNorthBoundLatitude(3.0); - dl.setSouthBoundLatitude(4.0); - dl.setParent(job); - - job.setJobDownloads(Arrays.asList(dl)); - - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), mockPortalUser);will(returnValue(job)); - oneOf(mockFileStagingService).listStageInDirectoryFiles(job);will(returnValue(stagedFiles)); - }}); - - ModelAndView mav = controller.getAllJobInputs(Integer.parseInt(jobId)); - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - Assert.assertNotNull(mav.getModel().get("data")); - @SuppressWarnings("unchecked") - List fileInfo = (List) mav.getModel().get("data"); - - Assert.assertEquals(2, fileInfo.size()); - - Assert.assertEquals(stagedFiles[0].getName(), fileInfo.get(0).getLocalPath()); - Assert.assertEquals(dl.getLocalPath(), fileInfo.get(1).getLocalPath()); - } - - /** - * Simple test to test formatting of cloud service into ModelMap objects - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void testGetComputeServices() throws Exception { - final String name = "name"; - final String id = "id"; - - context.checking(new Expectations() {{ - allowing(mockCloudComputeServices[0]).getName();will(returnValue(name)); - 
allowing(mockCloudComputeServices[0]).getId();will(returnValue(id)); - allowing(mockScmEntryService).getJobProviders((Integer)null, mockPortalUser);will(returnValue(null)); - oneOf(mockNciDetailsService).getByUser(mockPortalUser);will(returnValue(null)); - }}); - - ModelAndView mav = controller.getComputeServices(null); - - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - ModelMap test = ((List)mav.getModel().get("data")).get(0); - - Assert.assertEquals(name, test.get("name")); - Assert.assertEquals(id, test.get("id")); - } - - /** - * Simple test to test formatting of cloud service into ModelMap objects - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void testGetStorageServices() throws Exception { - final String name = "name"; - final String id = "id"; - - context.checking(new Expectations() {{ - allowing(mockCloudStorageServices[0]).getName();will(returnValue(name)); - allowing(mockCloudStorageServices[0]).getId();will(returnValue(id)); - }}); - - ModelAndView mav = controller.getStorageServices(); - - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - ModelMap test = ((List)mav.getModel().get("data")).get(0); - - Assert.assertEquals(name, test.get("name")); - Assert.assertEquals(id, test.get("id")); - } - - /** - * Tests that getting a compute type list for a particular compute service returns no exceptions - * @throws Exception - */ - @Test - public void testGetComputeTypes() throws Exception { - final String computeId = "compute-id"; - final String imageId = "image-id"; - final ComputeType[] result = new ComputeType[] {new ComputeType("m3.test-compute-type")}; - final MachineImage[] machineImages = new MachineImage[] {new MachineImage("another-image"), new MachineImage(imageId)}; - - machineImages[0].setMinimumDiskGB(200); - machineImages[1].setMinimumDiskGB(1000); - - context.checking(new Expectations() {{ - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeId)); - allowing(mockCloudComputeServices[0]).getAvailableComputeTypes("image-id");will(returnValue(result)); - allowing(mockCloudComputeServices[0]).getAvailableImages();will(returnValue(machineImages)); - }}); - - ModelAndView mav = controller.getTypesForComputeService(mockRequest, computeId, imageId); - - Assert.assertNotNull(mav); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - - Object[] actual = (Object[])mav.getModel().get("data"); - Assert.assertEquals(result.length, actual.length); - Assert.assertSame(result[0], actual[0]); - } - - /** - * Tests that getting a compute type list for a particular compute service returns no exceptions - * @throws Exception - */ - @Test - public void testGetComputeTypes_NoComputeService() throws Exception { - final String computeId = "compute-id"; - - context.checking(new Expectations() {{ - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeId)); - }}); - - ModelAndView mav = controller.getTypesForComputeService(mockRequest, "non-matching-compute-id", "image-id"); - - Assert.assertNotNull(mav); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - -} diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestJobDownloadController.java b/src/test/java/org/auscope/portal/server/web/controllers/TestJobDownloadController.java deleted file mode 100644 index 5fd8ba90b..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestJobDownloadController.java +++ /dev/null @@ -1,441 +0,0 @@ 
-package org.auscope.portal.server.web.controllers; - -import java.awt.Dimension; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; -import jakarta.servlet.http.HttpSession; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.WCSService; -import org.auscope.portal.core.services.methodmakers.filter.FilterBoundingBox; -import org.auscope.portal.core.services.responses.csw.CSWGeographicBoundingBox; -import org.auscope.portal.core.services.responses.wcs.Resolution; -import org.auscope.portal.core.services.responses.wcs.TimeConstraint; -import org.auscope.portal.core.services.responses.wfs.WFSGetCapabilitiesResponse; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.web.service.SimpleWfsService; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.ui.ModelMap; -import org.springframework.web.servlet.ModelAndView; - -/** - * Unit tests for JobDownloadController - * @author Josh Vote - * - */ -public class TestJobDownloadController extends PortalTestClass { - private HttpServletRequest mockRequest = context.mock(HttpServletRequest.class); - private HttpServletResponse mockResponse = context.mock(HttpServletResponse.class); - private HttpSession mockSession = context.mock(HttpSession.class); - private SimpleWfsService mockWfsService = context.mock(SimpleWfsService.class); - private WCSService mockWcsService = context.mock(WCSService.class); - private JobDownloadController controller; - final String serviceUrl = "http://example.org/service"; - final String coverageUrl = "http://example.org/coverage"; - - @Before - public void setup() { - controller = new JobDownloadController(mockWfsService, mockWcsService, serviceUrl); - } - - @Test - public void testMakeErddapUrlSaveSession() { - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String format = "nc"; - final String layerName = "layer"; - final String name = "name"; - final String description = "desc"; - final String localPath = "localPath"; - /* - final String parentName = "parent data"; - final String parentUrl = "http://example.org/service"; - final String owner = "CoolCompany@cool.com"; - */ - - - final List downloads = new ArrayList<>(); - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - - oneOf(mockSession).getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST);will(returnValue(downloads)); - oneOf(mockSession).setAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST, downloads); - }}); - - ModelAndView mav = controller.makeErddapUrl(northBoundLatitude, eastBoundLongitude, southBoundLatitude, westBoundLongitude, format, layerName, name, description, description, localPath, /*parentName, parentUrl, owner, */true, mockRequest, mockResponse); - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - - //Check response - Assert.assertNotNull(mav.getModel().get("data")); - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - 
Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertTrue(data.get("url").toString().contains(serviceUrl)); - - //Check session variables - Assert.assertEquals(1, downloads.size()); - VglDownload download = downloads.get(0); - Assert.assertEquals(northBoundLatitude, download.getNorthBoundLatitude()); - Assert.assertEquals(eastBoundLongitude, download.getEastBoundLongitude()); - Assert.assertEquals(southBoundLatitude, download.getSouthBoundLatitude()); - Assert.assertEquals(westBoundLongitude, download.getWestBoundLongitude()); - Assert.assertEquals(name, download.getName()); - Assert.assertEquals(description, download.getDescription()); - Assert.assertEquals(localPath, download.getLocalPath()); - Assert.assertTrue(download.getUrl().startsWith(serviceUrl)); - Assert.assertTrue(download.getUrl().contains("." + format)); - Assert.assertTrue(download.getUrl().contains(layerName)); - } - - @Test - public void testMakeErddapUrlNotSaveSession() { - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String format = "nc"; - final String layerName = "layer"; - final String name = "name"; - final String description = "desc"; - final String localPath = "localPath"; - /* - final String parentName = "parent data"; - final String parentUrl = "http://example.org/service"; - final String owner = "CoolCompany@cool.com"; - */ - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - }}); - - ModelAndView mav = controller.makeErddapUrl(northBoundLatitude, eastBoundLongitude, southBoundLatitude, westBoundLongitude, format, layerName, name, description, description, localPath, /*parentName, parentUrl, owner, */false, mockRequest, mockResponse); - - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - - //Check response - Assert.assertNotNull(mav.getModel().get("data")); - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertTrue(data.get("url").toString().contains(serviceUrl)); - } - - @Test - public void testMakeDownloadUrlSaveSession() { - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String name = "name"; - final String description = "desc"; - final String localPath = "localPath"; - /* - final String parentName = "parent data"; - final String parentUrl = "http://example.org/service"; - final String owner = "CoolCompany@cool.com"; - */ - - final List downloads = new ArrayList<>(); - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - - oneOf(mockSession).getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST);will(returnValue(downloads)); - oneOf(mockSession).setAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST, downloads); - }}); - - ModelAndView mav = controller.makeDownloadUrl(serviceUrl, name, 
description, description, localPath, northBoundLatitude, eastBoundLongitude, southBoundLatitude, westBoundLongitude, /*parentName, parentUrl, owner, */true, mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - - //Check response - Assert.assertNotNull(mav.getModel().get("data")); - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertEquals(serviceUrl, data.get("url")); - - //Check session variables - Assert.assertEquals(1, downloads.size()); - VglDownload download = downloads.get(0); - Assert.assertEquals(northBoundLatitude, download.getNorthBoundLatitude()); - Assert.assertEquals(eastBoundLongitude, download.getEastBoundLongitude()); - Assert.assertEquals(southBoundLatitude, download.getSouthBoundLatitude()); - Assert.assertEquals(westBoundLongitude, download.getWestBoundLongitude()); - Assert.assertEquals(name, download.getName()); - Assert.assertEquals(description, download.getDescription()); - Assert.assertEquals(localPath, download.getLocalPath()); - Assert.assertEquals(serviceUrl, download.getUrl()); - } - - @Test - public void testMakeDownloadUrlNotSaveSession() { - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String name = "name"; - final String description = "desc"; - final String localPath = "localPath"; - /* - final String parentName = "parent data"; - final String parentUrl = "http://example.org/service"; - final String owner = "CoolCompany@cool.com"; - */ - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - }}); - - ModelAndView mav = controller.makeDownloadUrl(serviceUrl, name, description, description, localPath, northBoundLatitude, eastBoundLongitude, southBoundLatitude, westBoundLongitude, /*parentName, parentUrl, owner, */false, mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - - //Check response - Assert.assertNotNull(mav.getModel().get("data")); - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertEquals(serviceUrl, data.get("url")); - } - - @Test - public void testMakeWfsUrlSaveSession() throws Exception { - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String srsName = "EPSG:4326"; - final String bboxSrs = "EPSG:4387"; - final String featureType = "test:featureType"; - final String name = "name"; - final String description = "desc"; - final String localPath = "localPath"; - final String localServiceUrl = "http://example.org/wfs"; - final String outputFormat = "o-f"; - final Integer maxFeatures = null; - final List downloads = new 
ArrayList<>(); - final String wfsRequestString = localServiceUrl + "?request=param"; - /* - final String parentName = "parent data"; - final String parentUrl = "http://example.org/wfs"; - final String owner = "CoolCompany@cool.com"; - */ - - final String[] expectedFormats = new String[] {"format1", "format2"}; - final WFSGetCapabilitiesResponse localMockResponse = context.mock(WFSGetCapabilitiesResponse.class); - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - - oneOf(mockWfsService).getFeatureRequestAsString(with(localServiceUrl), with(featureType), with(any(FilterBoundingBox.class)), with(maxFeatures), with(srsName), with(outputFormat)); - will(returnValue(wfsRequestString)); - - allowing(localMockResponse).getGetFeatureOutputFormats();will(returnValue(expectedFormats)); - - oneOf(mockSession).getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST);will(returnValue(downloads)); - oneOf(mockSession).setAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST, downloads); - }}); - - ModelAndView mav = controller.makeWfsUrl(localServiceUrl, featureType, srsName, bboxSrs, - northBoundLatitude, southBoundLatitude, eastBoundLongitude, westBoundLongitude, - outputFormat, maxFeatures, name, description, description, localPath, /*parentName, parentUrl, owner, */true, mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - - //Check response - Assert.assertNotNull(mav.getModel().get("data")); - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertEquals(wfsRequestString, data.get("url")); - - //Check session variables - Assert.assertEquals(1, downloads.size()); - VglDownload download = downloads.get(0); - Assert.assertEquals(northBoundLatitude, download.getNorthBoundLatitude()); - Assert.assertEquals(eastBoundLongitude, download.getEastBoundLongitude()); - Assert.assertEquals(southBoundLatitude, download.getSouthBoundLatitude()); - Assert.assertEquals(westBoundLongitude, download.getWestBoundLongitude()); - Assert.assertEquals(name, download.getName()); - Assert.assertEquals(description, download.getDescription()); - Assert.assertEquals(localPath, download.getLocalPath()); - Assert.assertEquals(wfsRequestString, download.getUrl()); - } - - @Test - public void testMakeWfsUrlNotSaveSession() throws Exception { - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String srsName = "EPSG:4326"; - final String bboxSrs = "EPSG:4387"; - final String featureType = "test:featureType"; - final String name = "name"; - final String description = "desc"; - final String localPath = "localPath"; - final String localsServiceUrl = "http://example.org/wfs"; - final String outputFormat = "o-f"; - final Integer maxFeatures = null; - final String wfsRequestString = localsServiceUrl + "?request=param"; - /* - final String parentName = "parent data"; - final String parentUrl = "http://example.org/wfs"; - final String owner = "CoolCompany@cool.com"; - */ - - final String[] expectedFormats = new String[] {"format1", "format2"}; - final 
WFSGetCapabilitiesResponse localMockResponse = context.mock(WFSGetCapabilitiesResponse.class); - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - - oneOf(mockWfsService).getFeatureRequestAsString(with(localsServiceUrl), with(featureType), with(any(FilterBoundingBox.class)), with(maxFeatures), with(srsName), with(outputFormat)); - will(returnValue(wfsRequestString)); - - allowing(localMockResponse).getGetFeatureOutputFormats();will(returnValue(expectedFormats)); - }}); - - ModelAndView mav = controller.makeWfsUrl(localsServiceUrl, featureType, srsName, bboxSrs, - northBoundLatitude, southBoundLatitude, eastBoundLongitude, westBoundLongitude, - outputFormat, maxFeatures, name, description, description, localPath, /*parentName, parentUrl, owner, */false, mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - - //Check response - Assert.assertNotNull(mav.getModel().get("data")); - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertEquals(wfsRequestString, data.get("url")); - } - - /** - * Test the method makeWcsUrl(...) - */ - @Test - public void testMakeWcsUrl() throws PortalServiceException { - final String localsServiceUrl = "http://example.org/wfs"; - final String name = "name"; - final String coverageName = "coverageName"; - final Double northBoundLatitude = 2.0; - final Double eastBoundLongitude = 4.0; - final Double southBoundLatitude = 1.0; - final Double westBoundLongitude = 3.0; - final String bboxCrs = "EPSG:4387"; - final String outputCrs = "EPSG:4387"; - final String outputFormat = "o-f"; - final Integer outputWidth = 50; - final Integer outputHeight = 50; - final Double outputResolutionX = 1.0; - final Double outputResolutionY = 1.0; - final String description = "description"; - final String fullDescription = "fullDescription"; - final String localPath = ""; - - context.checking(new Expectations() {{ - oneOf(mockWcsService).getCoverageRequestAsString( - with(equal(localsServiceUrl)), - with(equal(coverageName)), - with(equal(outputFormat)), - with(equal(outputCrs)), - with(equal(new Dimension(outputWidth, outputHeight))), - with(equal(new Resolution(outputResolutionX, outputResolutionY))), - with(equal(bboxCrs)), - with(any(CSWGeographicBoundingBox.class)), - with(aNull(TimeConstraint.class)), - with(aNull(Map.class))); - will(returnValue(coverageUrl)); - }}); - ModelAndView mav = controller.makeWcsUrl(localsServiceUrl, coverageName, outputFormat, bboxCrs, - outputCrs, outputWidth, outputHeight, outputResolutionX, outputResolutionY, - northBoundLatitude, southBoundLatitude, eastBoundLongitude, westBoundLongitude, - name, description, fullDescription, localPath, - false, mockRequest); - Assert.assertNotNull(mav); - Assert.assertTrue(((Boolean) mav.getModel().get("success"))); - Assert.assertNotNull(mav.getModel().get("data")); - - ModelMap data = (ModelMap) mav.getModel().get("data"); - Assert.assertEquals(northBoundLatitude, data.get("northBoundLatitude")); - Assert.assertEquals(eastBoundLongitude, data.get("eastBoundLongitude")); - Assert.assertEquals(southBoundLatitude, data.get("southBoundLatitude")); - 
Assert.assertEquals(westBoundLongitude, data.get("westBoundLongitude")); - Assert.assertEquals(name, data.get("name")); - Assert.assertNotNull(data.get("url")); - } - - /** - * Tests that getting the number of download items stored - * in the user session works as expected - */ - @Test - public void testGetNumDownloadRequests() { - final List vglDownloads = new ArrayList<>(); - final VglDownload d1 = new VglDownload(1); - final VglDownload d2 = new VglDownload(2); - vglDownloads.add(d1); - vglDownloads.add(d2); - - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - allowing(mockSession).getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST);will(returnValue(vglDownloads)); - }}); - - ModelAndView mav = controller.getNumDownloadRequests(mockRequest); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - Integer numDownloads = (Integer) mav.getModel().get("data"); - Assert.assertEquals(Integer.valueOf(2), numDownloads); - } - - /** - * Tests that getting the number of download items stored - * in the user session works as expected when the jobDownloadList - * attribute can't be found in the user session (meaning the user - * hasn't captured any data sets). - */ - @Test - public void testGetNumDownloadRequests_NullJobDownloadList() { - context.checking(new Expectations() {{ - allowing(mockRequest).getSession();will(returnValue(mockSession)); - allowing(mockSession).getAttribute(JobDownloadController.SESSION_DOWNLOAD_LIST);will(returnValue(null)); - }}); - - ModelAndView mav = controller.getNumDownloadRequests(mockRequest); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - Integer numDownloads = (Integer) mav.getModel().get("data"); - Assert.assertEquals(Integer.valueOf(0), numDownloads); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestJobListController.java b/src/test/java/org/auscope/portal/server/web/controllers/TestJobListController.java deleted file mode 100644 index 2f1ec34b9..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestJobListController.java +++ /dev/null @@ -1,1309 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; - -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; - -import org.apache.commons.io.output.ByteArrayOutputStream; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageServiceJClouds; -import org.auscope.portal.core.services.cloud.FileStagingService; -import org.auscope.portal.core.services.cloud.monitor.JobStatusMonitor; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.core.test.jmock.ReadableServletOutputStream; -import org.auscope.portal.jmock.VEGLJobMatcher; -import org.auscope.portal.jmock.VEGLSeriesMatcher; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VEGLSeries; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import 
org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.CloudSubmissionService; -import org.auscope.portal.server.web.service.VGLJobAuditLogService; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.web.servlet.ModelAndView; - -/** - * Unit tests for JobListController - * @author Josh Vote - * @author Richard Goh - */ -public class TestJobListController extends PortalTestClass { - private final String computeServiceId = "comp-service-id"; - private final String storageServiceId = "storage-service-id"; - private VEGLJobManager mockJobManager; - private PortalUserService mockUserService; - private CloudStorageServiceJClouds[] mockCloudStorageServices; - private FileStagingService mockFileStagingService; - private CloudComputeService[] mockCloudComputeServices; - private VGLJobStatusAndLogReader mockVGLJobStatusAndLogReader; - private VGLJobAuditLogService mockJobAuditLogService; - private PortalUser mockPortalUser; - private JobStatusMonitor mockJobStatusMonitor; - private HttpServletRequest mockRequest; - private HttpServletResponse mockResponse; - private JobListController controller; - private CloudSubmissionService mockCloudSubmissionService; - - /** - * Load our mock objects - */ - @Before - public void init() { - mockJobManager = context.mock(VEGLJobManager.class); - mockCloudStorageServices = new CloudStorageServiceJClouds[] {context.mock(CloudStorageServiceJClouds.class)}; - mockFileStagingService = context.mock(FileStagingService.class); - mockCloudComputeServices = new CloudComputeService[] {context.mock(CloudComputeService.class)}; - mockUserService = context.mock(PortalUserService.class); - mockVGLJobStatusAndLogReader = context.mock(VGLJobStatusAndLogReader.class); - mockJobStatusMonitor = context.mock(JobStatusMonitor.class); - mockResponse = context.mock(HttpServletResponse.class); - mockRequest = context.mock(HttpServletRequest.class); - mockJobAuditLogService = context.mock(VGLJobAuditLogService.class); - mockPortalUser = context.mock(PortalUser.class); - mockCloudSubmissionService = context.mock(CloudSubmissionService.class); - final List mockJobs=new ArrayList<>(); - - context.checking(new Expectations() {{ - allowing(mockCloudStorageServices[0]).getId();will(returnValue(storageServiceId)); - allowing(mockCloudComputeServices[0]).getId();will(returnValue(computeServiceId)); - allowing(mockJobManager).getInQueueJobs();will(returnValue(mockJobs)); - allowing(mockUserService).getLoggedInUser();will(returnValue(mockPortalUser)); - }}); - - controller = new JobListController(mockJobManager, - mockCloudStorageServices, mockFileStagingService, - mockCloudComputeServices, mockUserService, - mockVGLJobStatusAndLogReader, mockJobStatusMonitor,null,null,"dummy@dummy.com", mockCloudSubmissionService, mockJobAuditLogService); - } - - - - public static VEGLJobMatcher aVeglJob(Integer id) { - return new VEGLJobMatcher(id); - } - - public static VEGLJobMatcher aNonMatchingVeglJob(Integer id) { - return new VEGLJobMatcher(id, true); - } - -// /** -// * Tests getting a series from the job manager -// */ -// @Test -// public void testMySeries() { -// final String userEmail = "exampleuser@email.com"; -// final VEGLSeries series = context.mock(VEGLSeries.class); -// final List seriesList = Arrays.asList(series); -// -// 
context.checking(new Expectations() {{ -// allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); -// -// oneOf(mockJobManager).querySeries(userEmail, null, null);will(returnValue(seriesList)); -// }}); -// -// ModelAndView mav = controller.mySeries(mockRequest, mockResponse, mockPortalUser); -// Assert.assertTrue((Boolean)mav.getModel().get("success")); -// } -// -// /** -// * Tests getting a series when there is no email address in the user's session -// */ -// @Test -// public void testMySeriesNoEmail() { -// context.checking(new Expectations() {{ -// allowing(mockPortalUser).getEmail();will(returnValue(null)); -// }}); -// -// ModelAndView mav = controller.mySeries(mockRequest, mockResponse, mockPortalUser); -// Assert.assertFalse((Boolean)mav.getModel().get("success")); -// } -// -// /** -// * Tests getting a series when there is no email address in the user's session -// */ -// @Test -// public void testMySeriesNoUser() { -// context.checking(new Expectations() {{ -// -// }}); -// -// ModelAndView mav = controller.mySeries(mockRequest, mockResponse, null); -// Assert.assertFalse((Boolean)mav.getModel().get("success")); -// } - - /** - * Tests deleting a job successfully - * @throws PortalServiceException - */ - @Test - public void testDeleteJob() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String initialStatus = JobBuilderController.STATUS_DONE; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - - //Make sure the job marked as deleted and its transition audit trial record is created - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).setStatus(JobBuilderController.STATUS_DELETED); - oneOf(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(initialStatus, mockJob, "Job deleted."); - - oneOf(mockFileStagingService).deleteStageInDirectory(mockJob); - oneOf(mockJob).getRegisteredUrl();will(returnValue("geonetwork url")); - }}); - - ModelAndView mav = controller.deleteJob(mockRequest, mockResponse, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests deleting a job successfully - */ - @Test - public void testDeleteJob_NotRegistered() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String initialStatus = JobBuilderController.STATUS_DONE; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getId();will(returnValue(jobId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - - - //Make sure the job marked as deleted and its transition audit trial 
record is created - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).setStatus(JobBuilderController.STATUS_DELETED); - oneOf(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(initialStatus, mockJob, "Job deleted."); - - oneOf(mockFileStagingService).deleteStageInDirectory(mockJob); - oneOf(mockJob).getRegisteredUrl();will(returnValue(null)); //the job isn't registered - oneOf(mockCloudStorageServices[0]).deleteJobFiles(mockJob); //this must occur if the job isnt registered - }}); - - ModelAndView mav = controller.deleteJob(mockRequest, mockResponse, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - - /** - * Tests deleting a running job successfully - */ - @Test - public void testDeleteJob_Running() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String initialStatus = JobBuilderController.STATUS_ACTIVE; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - - //Make sure the job marked as deleted and its transition audit trial record is created - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).setStatus(JobBuilderController.STATUS_DELETED); - oneOf(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(initialStatus, mockJob, "Job deleted."); - - oneOf(mockFileStagingService).deleteStageInDirectory(mockJob); - oneOf(mockJob).getRegisteredUrl();will(returnValue("geonetwork url")); - - oneOf(mockCloudComputeServices[0]).terminateJob(mockJob); - }}); - - ModelAndView mav = controller.deleteJob(mockRequest, mockResponse, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests deleting a queued job successfully - */ - @Test - public void testDeleteJob_InQueue() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String initialStatus = JobBuilderController.STATUS_INQUEUE; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - - //Make sure the job marked as deleted and its transition audit trial record is created - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).getStatus();will(returnValue(initialStatus)); - oneOf(mockJob).setStatus(JobBuilderController.STATUS_DELETED); - oneOf(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(initialStatus, mockJob, "Job deleted."); - - 
oneOf(mockFileStagingService).deleteStageInDirectory(mockJob); - oneOf(mockJob).getRegisteredUrl();will(returnValue("geonetwork url")); - - oneOf(mockCloudSubmissionService).dequeueSubmission(mockJob, mockCloudComputeServices[0]); - }}); - - ModelAndView mav = controller.deleteJob(mockRequest, mockResponse, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests deleting a job fails when its another users job - * @throws PortalServiceException - */ - @Test(expected=AccessDeniedException.class) - public void testDeleteJobNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String jobEmail = "adifferentuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(jobEmail)); - }}); - - controller.deleteJob(mockRequest, mockResponse, jobId); - } - - /** - * Tests deleting a job fails when the jobID DNE - * @throws PortalServiceException - */ - @Test - public void testDeleteJobDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(null)); - }}); - - ModelAndView mav = controller.deleteJob(mockRequest, mockResponse, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests deleting a series successfully - * @throws PortalServiceException - */ - @Test - public void testDeleteSeries() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - final List mockJobs = Arrays.asList( - context.mock(VEGLJob.class, "mockJob1"), - context.mock(VEGLJob.class, "mockJob2")); - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - allowing(mockSeries).getUser();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - oneOf(mockJobManager).getSeriesJobs(seriesId, mockPortalUser);will(returnValue(mockJobs)); - - //Make sure each job marked as deleted, its transition audit trial record - //is created and all its files in staging directory are deleted. 
- oneOf(mockJobs.get(0)).getStatus();will(returnValue(JobBuilderController.STATUS_PENDING)); - oneOf(mockJobs.get(0)).setStatus(JobBuilderController.STATUS_DELETED); - oneOf(mockJobManager).saveJob(mockJobs.get(0)); - oneOf(mockJobManager).createJobAuditTrail(JobBuilderController.STATUS_PENDING, mockJobs.get(0), "Job deleted."); - oneOf(mockFileStagingService).deleteStageInDirectory(mockJobs.get(0)); - oneOf(mockJobs.get(0)).getRegisteredUrl();will(returnValue("geonetwork url")); - - oneOf(mockJobs.get(1)).getStatus();will(returnValue(JobBuilderController.STATUS_DONE)); - oneOf(mockJobs.get(1)).setStatus(JobBuilderController.STATUS_DELETED); - oneOf(mockJobManager).saveJob(mockJobs.get(1)); - oneOf(mockJobManager).createJobAuditTrail(JobBuilderController.STATUS_DONE, mockJobs.get(1), "Job deleted."); - oneOf(mockFileStagingService).deleteStageInDirectory(mockJobs.get(1)); - oneOf(mockJobs.get(1)).getRegisteredUrl();will(returnValue("geonetwork url")); - - oneOf(mockJobManager).deleteSeries(mockSeries); - }}); - - ModelAndView mav = controller.deleteSeriesJobs(mockRequest, mockResponse, seriesId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests deleting a series fails when the user doesn't have permission - * @throws PortalServiceException - */ - @Test(expected=AccessDeniedException.class) - public void testDeleteSeriesNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String seriesEmail = "anotheruser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - allowing(mockSeries).getUser();will(returnValue(seriesEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - }}); - - controller.deleteSeriesJobs(mockRequest, mockResponse, seriesId); - } - - /** - * Tests deleting a series fails when series DNE - * @throws PortalServiceException - */ - @Test - public void testDeleteSeriesDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(null)); - }}); - - ModelAndView mav = controller.deleteSeriesJobs(mockRequest, mockResponse, seriesId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that deleting a series fails when job - * list is null. 
- * @throws PortalServiceException - */ - @Test - public void testDeleteSeries_JobListIsNull() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - allowing(mockSeries).getUser();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - oneOf(mockJobManager).getSeriesJobs(seriesId, mockPortalUser);will(returnValue(null)); - }}); - - ModelAndView mav = controller.deleteSeriesJobs(mockRequest, mockResponse, seriesId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - Assert.assertNull(mav.getModel().get("data")); - } - - /** - * Tests that killing or cancelling a job succeeds - */ - @Test - public void testKillJob() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - - allowing(mockJob).getStatus();will(returnValue(JobBuilderController.STATUS_PENDING)); - - oneOf(mockCloudComputeServices[0]).terminateJob(mockJob); - oneOf(mockJob).setStatus(JobBuilderController.STATUS_UNSUBMITTED); - oneOf(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(JobBuilderController.STATUS_PENDING, mockJob, "Job cancelled by user."); - }}); - - ModelAndView mav = controller.killJob(mockRequest, mockResponse, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that killing or cancelling job get aborted when the job is processed - * @throws PortalServiceException - */ - @Test - public void testKillJobAborted() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getStatus();will(returnValue(JobBuilderController.STATUS_DONE)); - }}); - - ModelAndView mav = controller.killJob(mockRequest, mockResponse, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that killing a job fails when its not the user's job - * @throws PortalServiceException - */ - @Test(expected=AccessDeniedException.class) - public void testKillJobNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String jobEmail = "anotheruser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - allowing(mockJob).getUser();will(returnValue(jobEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - }}); - - 
controller.killJob(mockRequest, mockResponse, jobId); - } - - /** - * Tests that killing a job fails when the job cannot be found - * @throws PortalServiceException - */ - @Test - public void testKillJobDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(null)); - }}); - - ModelAndView mav = controller.killJob(mockRequest, mockResponse, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that killing or cancelling all jobs of a series succeeds - */ - @Test - public void testKillSeriesJobs() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - final List mockJobs = Arrays.asList( - context.mock(VEGLJob.class, "mockJobDone"), - context.mock(VEGLJob.class, "mockJobActive"), - context.mock(VEGLJob.class, "mockJobUnsubmitted"), - context.mock(VEGLJob.class, "mockJobPending")); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - allowing(mockSeries).getUser();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesJobs(seriesId, mockPortalUser);will(returnValue(mockJobs)); - - //Each of our jobs is in a different status - allowing(mockJobs.get(0)).getStatus();will(returnValue(JobBuilderController.STATUS_DONE)); - allowing(mockJobs.get(1)).getStatus();will(returnValue(JobBuilderController.STATUS_ACTIVE)); - allowing(mockJobs.get(2)).getStatus();will(returnValue(JobBuilderController.STATUS_UNSUBMITTED)); - allowing(mockJobs.get(3)).getStatus();will(returnValue(JobBuilderController.STATUS_PENDING)); - allowing(mockJobs.get(0)).getId();will(returnValue(Integer.valueOf(0))); - allowing(mockJobs.get(1)).getId();will(returnValue(Integer.valueOf(1))); - allowing(mockJobs.get(2)).getId();will(returnValue(Integer.valueOf(2))); - allowing(mockJobs.get(3)).getId();will(returnValue(Integer.valueOf(3))); - allowing(mockJobs.get(0)).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJobs.get(0)).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJobs.get(1)).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJobs.get(1)).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJobs.get(2)).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJobs.get(2)).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJobs.get(3)).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJobs.get(3)).getComputeServiceId();will(returnValue(computeServiceId)); - - //Only the pending and active job can be cancelled - oneOf(mockCloudComputeServices[0]).terminateJob(mockJobs.get(1)); - oneOf(mockCloudComputeServices[0]).terminateJob(mockJobs.get(3)); - oneOf(mockJobs.get(1)).setStatus(JobBuilderController.STATUS_UNSUBMITTED); - oneOf(mockJobs.get(3)).setStatus(JobBuilderController.STATUS_UNSUBMITTED); - oneOf(mockJobManager).saveJob(mockJobs.get(1)); - oneOf(mockJobManager).saveJob(mockJobs.get(3)); - oneOf(mockJobManager).createJobAuditTrail(JobBuilderController.STATUS_ACTIVE, mockJobs.get(1), "Job cancelled by user."); - 
oneOf(mockJobManager).createJobAuditTrail(JobBuilderController.STATUS_PENDING, mockJobs.get(3), "Job cancelled by user."); - }}); - - ModelAndView mav = controller.killSeriesJobs(mockRequest, mockResponse, seriesId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that killing all jobs of a series fails when the user lacks permission - * @throws PortalServiceException - */ - @Test(expected=AccessDeniedException.class) - public void testKillSeriesJobsNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String seriesEmail = "anotheruser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - allowing(mockSeries).getUser();will(returnValue(seriesEmail)); - }}); - - controller.killSeriesJobs(mockRequest, mockResponse, seriesId); - } - - /** - * Tests that killing all jobs of a series fails when the user lacks permission - * @throws PortalServiceException - */ - @Test - public void testKillSeriesJobsDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(null)); - }}); - - ModelAndView mav = controller.killSeriesJobs(mockRequest, mockResponse, seriesId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * tests listing job files succeeds - */ - @Test - public void testListJobFiles() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final CloudFileInformation[] fileDetails = new CloudFileInformation[] { - context.mock(CloudFileInformation.class, "fileInfo1"), - context.mock(CloudFileInformation.class, "fileInfo2"), - context.mock(CloudFileInformation.class, "fileInfo3") - }; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - - oneOf(mockCloudStorageServices[0]).listJobFiles(mockJob);will(returnValue(fileDetails)); - }}); - - ModelAndView mav = controller.jobCloudFiles(mockRequest, mockResponse, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertSame(fileDetails, mav.getModel().get("data")); - } - - /** - * tests listing job files fails if the user doesnt have permission - * @throws PortalServiceException - */ - @Test(expected=AccessDeniedException.class) - public void testListJobFilesNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String jobEmail = "anotheruser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, 
mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(jobEmail)); - - }}); - - controller.jobCloudFiles(mockRequest, mockResponse, jobId); - } - - /** - * tests listing job files fails if the user doesnt have permission - * @throws PortalServiceException - */ - @Test - public void testListJobFilesDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(null)); - - }}); - - ModelAndView mav = controller.jobCloudFiles(mockRequest, mockResponse, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * tests listing job files fails when the underlying S3 service fails. - */ - @Test - public void testListJobFilesServiceException() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - - oneOf(mockCloudStorageServices[0]).listJobFiles(mockJob);will(throwException(new PortalServiceException(""))); - }}); - - ModelAndView mav = controller.jobCloudFiles(mockRequest, mockResponse, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that downloading a single job file succeeds - */ - @Test - public void testDownloadJobFile() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String key = "my/file/key"; - final String fileName = "fileName.txt"; - final byte[] data = new byte[] {1,2,4,5,6,7,8,6,5,4,4,3,2,1}; - final InputStream inputStream = new ByteArrayInputStream(data); - try (final ReadableServletOutputStream outStream = new ReadableServletOutputStream()) { - context.checking(new Expectations() { - { - allowing(mockPortalUser).getEmail(); - will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser); - will(returnValue(mockJob)); - allowing(mockJob).getUser(); - will(returnValue(userEmail)); - allowing(mockJob).getStorageServiceId(); - will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId(); - will(returnValue(computeServiceId)); - - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, key); - will(returnValue(inputStream)); - - // Ensure our response stream gets written to - oneOf(mockResponse).setContentType("application/octet-stream"); - allowing(mockResponse).setHeader(with(any(String.class)), with(any(String.class))); - oneOf(mockResponse).getOutputStream(); - will(returnValue(outStream)); - } - }); - - // Returns null on success - ModelAndView mav = controller.downloadFile(mockRequest, mockResponse, jobId, fileName, key); - Assert.assertNull(mav); - - Assert.assertArrayEquals(data, outStream.getDataWritten()); - } - } - - /** - * Tests that downloading a single job file fails when the user doesnt own the job - * @throws PortalServiceException - */ - 
@Test(expected=AccessDeniedException.class) - public void testDownloadJobFileNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String jobEmail = "anotheruser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String key = "my/file/key"; - final String fileName = "fileName.txt"; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(jobEmail)); - }}); - - controller.downloadFile(mockRequest, mockResponse, jobId, fileName, key); - } - - /** - * Tests that downloading a single job file fails when the job DNE - * @throws PortalServiceException - */ - @Test - public void testDownloadJobFileDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final String key = "my/file/key"; - final String fileName = "fileName.txt"; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(null)); - }}); - - //Returns null on success - ModelAndView mav = controller.downloadFile(mockRequest, mockResponse, jobId, fileName, key); - Assert.assertFalse((Boolean) mav.getModel().get("success")); - } - - /** - * Tests that downloading multiple job files succeeds - */ - @Test - public void testDownloadJobFilesAsZip() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String fileKey1 = "file/key/1"; - final String fileKey2 = "file/key/2"; - final String fileKey3 = "file/key/3"; - final String files = fileKey1 + "," + fileKey2 + "," + fileKey3 + ","; - final byte[] file1Data = new byte[] {1,2,4,5,6,7,8,0,5,4,4,4,2,1}; - final byte[] file2Data = new byte[] {2,5,4,5,2,2,8,6,5,7,4,3,4,2,6}; - final byte[] file3Data = new byte[] {3,2,7,5,6,9,8,8,5,4,6,3,4}; - final InputStream is1 = new ByteArrayInputStream(file1Data); - final InputStream is2 = new ByteArrayInputStream(file2Data); - final InputStream is3 = new ByteArrayInputStream(file3Data); - final String jobName = "job WITH !()[]#$%@\\/;\"'"; - final Date submitDate = new SimpleDateFormat("yyyyMMdd").parse("19861009"); - - try (final ReadableServletOutputStream outStream = new ReadableServletOutputStream()) { - context.checking(new Expectations() { - { - allowing(mockPortalUser).getEmail(); - will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser); - will(returnValue(mockJob)); - allowing(mockJob).getName(); - will(returnValue(jobName)); - allowing(mockJob).getUser(); - will(returnValue(userEmail)); - allowing(mockJob).getStorageServiceId(); - will(returnValue(storageServiceId)); - allowing(mockJob).getComputeServiceId(); - will(returnValue(computeServiceId)); - allowing(mockJob).getSubmitDate(); - will(returnValue(submitDate)); - - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, fileKey1); - will(returnValue(is1)); - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, fileKey2); - will(returnValue(is2)); - oneOf(mockCloudStorageServices[0]).getJobFile(mockJob, fileKey3); - will(returnValue(is3)); - - // Ensure our response stream gets written to - oneOf(mockResponse).setContentType("application/zip"); - 
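- // The attachment filename is derived from the job name (unsafe characters replaced with underscores) plus the submit date formatted as yyyyMMdd.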
oneOf(mockResponse).setHeader("Content-Disposition", - "attachment; filename=\"jobfiles_job_WITH________________19861009.zip\""); - oneOf(mockResponse).getOutputStream(); - will(returnValue(outStream)); - } - }); - - // Returns null on success - ModelAndView mav = controller.downloadAsZip(mockRequest, mockResponse, jobId, files); - Assert.assertNull(mav); - - // Lets decompose our zip stream to verify everything got written - // correctly - ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(outStream.getDataWritten())); - byte[] buf = null; - int dataRead = 0; - - // Entry 1 - ZipEntry entry1 = zis.getNextEntry(); - Assert.assertNotNull(entry1); - Assert.assertEquals(fileKey1, entry1.getName()); - buf = new byte[file1Data.length]; - dataRead = zis.read(buf); - Assert.assertEquals(buf.length, dataRead); - Assert.assertArrayEquals(file1Data, buf); - - // Entry 2 - ZipEntry entry2 = zis.getNextEntry(); - Assert.assertNotNull(entry2); - Assert.assertEquals(fileKey2, entry2.getName()); - buf = new byte[file2Data.length]; - dataRead = zis.read(buf); - Assert.assertEquals(buf.length, dataRead); - Assert.assertArrayEquals(file2Data, buf); - - // Entry 3 - ZipEntry entry3 = zis.getNextEntry(); - Assert.assertNotNull(entry3); - Assert.assertEquals(fileKey3, entry3.getName()); - buf = new byte[file3Data.length]; - dataRead = zis.read(buf); - Assert.assertEquals(buf.length, dataRead); - Assert.assertArrayEquals(file3Data, buf); - - // And that should be it - Assert.assertNull(zis.getNextEntry()); - } - } - - /** - * Tests that downloading multiple job files fails if user doesn't own job - * @throws PortalServiceException - */ - @Test(expected=AccessDeniedException.class) - public void testDownloadJobFilesNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String jobEmail = "anotheruser@email.com"; - final int jobId = 1234; - final String files = "filekey1,filekey2"; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(jobEmail)); - }}); - - //Returns null on success - controller.downloadAsZip(mockRequest, mockResponse, jobId, files); - } - - /** - * Tests that downloading multiple job files fails if job DNE - * @throws PortalServiceException - */ - @Test - public void testDownloadJobFilesDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final String files = "filekey1,filekey2"; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(null)); - }}); - - //Returns null on success - ModelAndView mav = controller.downloadAsZip(mockRequest, mockResponse, jobId, files); - Assert.assertFalse((Boolean) mav.getModel().get("success")); - } - - /** - * Tests that querying for a set of series returns correct values - */ - @Test - public void testQuerySeries() { - final String userEmail = "exampleuser@email.com"; - final String qUser = "exampleuser@email.com"; - final String qName = "name"; - final String qDescription = "description"; - final List series = Arrays.asList( - context.mock(VEGLSeries.class, "mockSeries1"), - context.mock(VEGLSeries.class, "mockSeries2")); - - context.checking(new Expectations() {{ 
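- // querySeries is expected to be called with the session user's email as the user filter, plus the supplied name and description.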
- allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).querySeries(qUser, qName, qDescription);will(returnValue(series)); - }}); - - //Returns null on success - ModelAndView mav = controller.querySeries(mockRequest, mockResponse, qName, qDescription); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - Assert.assertSame(series, mav.getModel().get("data")); - } - - /** - * Tests that querying for a set of series with no params filters via session email - */ - @Test - public void testQuerySeriesNoUser() { - final String userEmail = "exampleuser@email.com"; - final String qName = null; - final String qDescription = null; - final List series = Arrays.asList( - context.mock(VEGLSeries.class, "mockSeries1"), - context.mock(VEGLSeries.class, "mockSeries2")); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).querySeries(userEmail, null, null);will(returnValue(series)); - }}); - - //Returns null on success - ModelAndView mav = controller.querySeries(mockRequest, mockResponse, qName, qDescription); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - Assert.assertSame(series, mav.getModel().get("data")); - } - - public static VEGLSeriesMatcher aVEGLSeries(String user, String name, String description) { - return new VEGLSeriesMatcher(user, name, description); - } - - /** - * Tests that creating a folder succeeds - * @throws Exception - */ - @Test - public void testCreateFolder() { - final String userEmail = "exampleuser@email.com"; - final String qName = "default"; - final String qDescription = "Everything will now come through to a single default series"; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - oneOf(mockJobManager).saveSeries(with(aVEGLSeries(userEmail, qName, qDescription))); - }}); - - ModelAndView mav = controller.createFolder(mockRequest, qName, qDescription); - - Assert.assertTrue((Boolean) mav.getModel().get("success")); - - } - - /** - * Tests that listing a job succeeds - * @throws PortalServiceException - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void testListJobs() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - final List mockJobs = Arrays.asList( - context.mock(VEGLJob.class, "mockJobActive"), - context.mock(VEGLJob.class, "mockJobUnsubmitted"), - context.mock(VEGLJob.class, "mockJobDone"), - context.mock(VEGLJob.class, "mockJobPending") - ); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - allowing(mockSeries).getUser();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesJobs(seriesId, mockPortalUser);will(returnValue(mockJobs)); - }}); - - ModelAndView mav = controller.listJobs(mockRequest, mockResponse, seriesId, false); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - Assert.assertArrayEquals(mockJobs.toArray(), ((List) mav.getModel().get("data")).toArray()); - } - - /** - * Tests that listing a job succeeds (as well as correctly updating job statuses) - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void testListJobsWithStatusUpdate() throws Exception { - final String userEmail = 
"exampleuser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - final List mockJobs = Arrays.asList( - context.mock(VEGLJob.class, "mockJobActive"), - context.mock(VEGLJob.class, "mockJobUnsubmitted"), - context.mock(VEGLJob.class, "mockJobDone"), - context.mock(VEGLJob.class, "mockJobPending") - ); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - allowing(mockSeries).getUser();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesJobs(seriesId, mockPortalUser);will(returnValue(mockJobs)); - - oneOf(mockJobStatusMonitor).statusUpdate(mockJobs); - }}); - - ModelAndView mav = controller.listJobs(mockRequest, mockResponse, seriesId, true); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - Assert.assertArrayEquals(mockJobs.toArray(), ((List) mav.getModel().get("data")).toArray()); - } - - /** - * Tests that listing a job fails when its the incorrect user - * @throws PortalServiceException - * @throws Exception - */ - @Test(expected=AccessDeniedException.class) - public void testListJobsNoPermission() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final String seriesEmail = "anotheruser@email.com"; - final int seriesId = 1234; - final VEGLSeries mockSeries = context.mock(VEGLSeries.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(mockSeries)); - allowing(mockSeries).getUser();will(returnValue(seriesEmail)); - }}); - - controller.listJobs(mockRequest, mockResponse, seriesId, false); - } - - /** - * Tests that listing a job fails when its the incorrect user - * @throws PortalServiceException - * @throws Exception - */ - @Test - public void testListJobsDNE() throws PortalServiceException { - final String userEmail = "exampleuser@email.com"; - final int seriesId = 1234; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getSeriesById(seriesId, userEmail);will(returnValue(null)); - }}); - - ModelAndView mav = controller.listJobs(mockRequest, mockResponse, seriesId, false); - Assert.assertFalse((Boolean) mav.getModel().get("success")); - } - - @Test - public void testDuplicateJob() throws Exception { - final Integer jobId = 1234; - final String userEmail = "exampleuser@email.com"; - final String[] files = new String[] {"file1.txt", "file2.txt"}; - final byte[] data1 = new byte[] {1,3,4}; - final byte[] data2 = new byte[] {2,9,3,4}; - final InputStream is1 = new ByteArrayInputStream(data1); - final InputStream is2 = new ByteArrayInputStream(data2); - final CloudFileInformation[] cloudFiles = new CloudFileInformation[] { - new CloudFileInformation("long/key/file1.txt", data1.length, "http://example.org/file1"), - new CloudFileInformation("long/key/file2.txt", data2.length, "http://example.org/file2"), - new CloudFileInformation("long/key/file3.txt", 5L, "http://example.org/file3") //this will not be downloaded - }; - - final String baseKey = "base-key"; - VEGLJob existingJob = new VEGLJob(); - existingJob.setId(jobId); - existingJob.setUser(userEmail); - existingJob.setComputeServiceId(computeServiceId); - existingJob.setStorageServiceId(storageServiceId); - - try (final ByteArrayOutputStream bos1 = 
new ByteArrayOutputStream(); - final ByteArrayOutputStream bos2 = new ByteArrayOutputStream()) { - - context.checking(new Expectations() { - { - allowing(mockPortalUser).getEmail(); - will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser); - will(returnValue(existingJob)); - allowing(mockJobManager).saveJob(with(aNonMatchingVeglJob(jobId))); - - oneOf(mockFileStagingService).generateStageInDirectory(with(aNonMatchingVeglJob(jobId))); - oneOf(mockFileStagingService).writeFile(with(aNonMatchingVeglJob(jobId)), - with(cloudFiles[0].getName())); - will(returnValue(bos1)); - oneOf(mockFileStagingService).writeFile(with(aNonMatchingVeglJob(jobId)), - with(cloudFiles[1].getName())); - will(returnValue(bos2)); - - oneOf(mockCloudStorageServices[0]).generateBaseKey(with(aNonMatchingVeglJob(jobId))); - will(returnValue(baseKey)); - oneOf(mockCloudStorageServices[0]).listJobFiles(with(aVeglJob(jobId))); - will(returnValue(cloudFiles)); - oneOf(mockCloudStorageServices[0]).getJobFile(with(aVeglJob(jobId)), with(cloudFiles[0].getName())); - will(returnValue(is1)); - oneOf(mockCloudStorageServices[0]).getJobFile(with(aVeglJob(jobId)), with(cloudFiles[1].getName())); - will(returnValue(is2)); - - // We should have 1 call to our job manager to create a job - // audit trail record - oneOf(mockJobManager).createJobAuditTrail(with(aNull(String.class)), with(any(VEGLJob.class)), - with(any(String.class))); - } - }); - - ModelAndView mav = controller.duplicateJob(mockRequest, mockResponse, jobId, files); - Assert.assertTrue((Boolean) mav.getModel().get("success")); - - byte[] fis1Data = bos1.toByteArray(); - byte[] fis2Data = bos2.toByteArray(); - - Assert.assertArrayEquals(data1, fis1Data); - Assert.assertArrayEquals(data2, fis2Data); - } - } - - /** - * Tests requesting instance logs in the best case scenario - */ - @Test - public void testGetRawInstanceLogs() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - final String consoleData = "console\ndata\n"; - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - - oneOf(mockCloudComputeServices[0]).getConsoleLog(with(mockJob), with(any(Integer.class))); - will(returnValue(consoleData)); - }}); - - ModelAndView mav = controller.getRawInstanceLogs(mockRequest, jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - Assert.assertEquals(consoleData, mav.getModel().get("data")); - } - - /** - * Tests getting instance logs fails with bad service ID - */ - @Test - public void testGetRawInstanceLogs_BadService() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getComputeServiceId();will(returnValue("SERVICE-DNE")); - 
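- // "SERVICE-DNE" matches no configured compute service, so the controller is expected to report failure rather than throw.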
allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - - }}); - - ModelAndView mav = controller.getRawInstanceLogs(mockRequest, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests requesting instance logs fails gracefully when the underlying service throws an exception - */ - @Test - public void testGetRawInstanceLogs_UnableToRequest() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - - oneOf(mockCloudComputeServices[0]).getConsoleLog(with(mockJob), with(any(Integer.class))); - will(throwException(new PortalServiceException("error"))); - }}); - - ModelAndView mav = controller.getRawInstanceLogs(mockRequest, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests requesting instance logs fails gracefully when the underlying service returns null - */ - @Test - public void testGetRawInstanceLogs_NullLogs() throws Exception { - final String userEmail = "exampleuser@email.com"; - final int jobId = 1234; - final VEGLJob mockJob = context.mock(VEGLJob.class); - - context.checking(new Expectations() {{ - allowing(mockPortalUser).getEmail();will(returnValue(userEmail)); - - oneOf(mockJobManager).getJobById(jobId, mockPortalUser);will(returnValue(mockJob)); - allowing(mockJob).getUser();will(returnValue(userEmail)); - allowing(mockJob).getComputeServiceId();will(returnValue(computeServiceId)); - allowing(mockJob).getStorageServiceId();will(returnValue(storageServiceId)); - allowing(mockJob).getId();will(returnValue(jobId)); - - oneOf(mockCloudComputeServices[0]).getConsoleLog(with(mockJob), with(any(Integer.class))); - will(returnValue(null)); - }}); - - ModelAndView mav = controller.getRawInstanceLogs(mockRequest, jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } -} diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestPurchaseController.java b/src/test/java/org/auscope/portal/server/web/controllers/TestPurchaseController.java deleted file mode 100644 index 44d04f4b7..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestPurchaseController.java +++ /dev/null @@ -1,389 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.nio.charset.Charset; -import java.util.Collection; -import java.util.LinkedList; -import java.util.Locale; - -import jakarta.servlet.ReadListener; -import jakarta.servlet.ServletInputStream; -import jakarta.servlet.ServletOutputStream; -import jakarta.servlet.http.Cookie; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; - -import org.apache.commons.io.IOUtils; -import org.auscope.portal.core.services.CSWFilterService; -import org.auscope.portal.core.services.PortalServiceException; -import 
org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VGLDataPurchase; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.SimpleWfsService; -import org.auscope.portal.server.web.service.VGLPurchaseService; - -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import org.powermock.reflect.Whitebox; -import org.springframework.web.servlet.ModelAndView; - -/** - * Unit test for PurchaseController - * @author Bo Yan - * - */ -public class TestPurchaseController extends PortalTestClass { - - private SimpleWfsService mockWfsService = context.mock(SimpleWfsService.class); - private CSWFilterService mockCSWFilterService = context.mock(CSWFilterService.class); - private HttpServletRequest mockRequest = context.mock(HttpServletRequest.class); - private PortalUserService mockUserService = context.mock(PortalUserService.class); - private VGLPurchaseService mockPurchaseService = context.mock(VGLPurchaseService.class); - - private PurchaseController controller; - - final String serviceUrl = "http://example.org/service"; - - /** - * Setup controller before all the tests - */ - @Before - public void setup() { - controller = new PurchaseController(mockWfsService, mockCSWFilterService); - } - - /** - * test processDataPayment - * @throws Exception - */ - @Test - public void testProcessDataPayment() throws Exception { - - final InputStream strStream = IOUtils.toInputStream( - "{\"amount\": 0.0," + - "\"tokenId\": \"1234\"," + - "\"email\": \"test@123456789mail.com\"," + - "\"dataToPurchase\": ["+ - "{\"cswRecord\": \"csw\"}]}" - ,Charset.forName("UTF-8")); - final DelegatingServletInputStream input = new DelegatingServletInputStream(strStream); - - context.checking(new Expectations() { - { - allowing(mockRequest).getInputStream(); will(returnValue(input)); - allowing(mockUserService).getLoggedInUser(); will(returnValue(null)); // anonymous user! - } - }); - Whitebox.setInternalState(controller, "userService", mockUserService); - - HttpServletResponseImpl response = new HttpServletResponseImpl(); - controller.processDataPayment(mockRequest, response); - // Unable to process payment for anonymous user. - Assert.assertTrue(response.sw.toString().contains("anonymous user")); - } - - /** - * test processJobPayment - * @throws Exception - */ - @Test - public void testProcessJobPayment() throws Exception { - final InputStream strStream = IOUtils.toInputStream( - "{\"amount\": 0.0," + - "\"tokenId\": \"1234\"," + - "\"email\": \"test@123456789mail.com\"," + - "\"jobId\":1234," + - "\"jobName\":\"dummy\"}" - ,Charset.forName("UTF-8")); - final DelegatingServletInputStream input = new DelegatingServletInputStream(strStream); - - context.checking(new Expectations() { - { - allowing(mockRequest).getInputStream(); will(returnValue(input)); - allowing(mockUserService).getLoggedInUser(); will(returnValue(null)); // anonymous user! - } - }); - Whitebox.setInternalState(controller, "userService", mockUserService); - - HttpServletResponseImpl response = new HttpServletResponseImpl(); - controller.processJobPayment(mockRequest, response); - // Unable to process payment for anonymous user. 
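- // The stub response implemented below captures everything written via getWriter() in a StringWriter, so the error message can be inspected directly.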
- Assert.assertTrue(response.sw.toString().contains("anonymous user")); - } - - /** - * test getPurchases - * @throws PortalServiceException - */ - @Test - public void testGetPurchases() throws PortalServiceException { - final PortalUser user = new PortalUser(); - context.checking(new Expectations() { - { - allowing(mockUserService).getLoggedInUser(); will(returnValue(user)); - allowing(mockPurchaseService).getDataPurchasesByUser(user); will(returnValue(new LinkedList())); - } - }); - Whitebox.setInternalState(controller, "userService", mockUserService); - Whitebox.setInternalState(controller, "purchaseService", mockPurchaseService); - ModelAndView mav = controller.getPurchases(); - Assert.assertTrue(mav.getModel().containsKey("data")); - } - - private class HttpServletResponseImpl implements HttpServletResponse { - private final StringWriter sw = new StringWriter(); - private final PrintWriter pWriter = new PrintWriter(sw); - - @Override - public String getCharacterEncoding() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String getContentType() { - // TODO Auto-generated method stub - return null; - } - - @Override - public ServletOutputStream getOutputStream() throws IOException { - // TODO Auto-generated method stub - return null; - } - - @Override - public PrintWriter getWriter() throws IOException { - return pWriter; - } - - @Override - public void setCharacterEncoding(String charset) { - // TODO Auto-generated method stub - - } - - @Override - public void setContentLength(int len) { - // TODO Auto-generated method stub - - } - - @Override - public void setContentLengthLong(long length) { - // TODO Auto-generated method stub - - } - - @Override - public void setContentType(String type) { - // TODO Auto-generated method stub - - } - - @Override - public void setBufferSize(int size) { - // TODO Auto-generated method stub - - } - - @Override - public int getBufferSize() { - // TODO Auto-generated method stub - return 0; - } - - @Override - public void flushBuffer() throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void resetBuffer() { - // TODO Auto-generated method stub - - } - - @Override - public boolean isCommitted() { - // TODO Auto-generated method stub - return false; - } - - @Override - public void reset() { - // TODO Auto-generated method stub - - } - - @Override - public void setLocale(Locale loc) { - // TODO Auto-generated method stub - - } - - @Override - public Locale getLocale() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void addCookie(Cookie cookie) { - // TODO Auto-generated method stub - - } - - @Override - public boolean containsHeader(String name) { - // TODO Auto-generated method stub - return false; - } - - @Override - public String encodeURL(String url) { - // TODO Auto-generated method stub - return null; - } - - @Override - public String encodeRedirectURL(String url) { - // TODO Auto-generated method stub - return null; - } - - @Override - public void sendError(int sc, String msg) throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void sendError(int sc) throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void sendRedirect(String location) throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void setDateHeader(String name, long date) { - // TODO Auto-generated method stub - - } - - @Override - public void addDateHeader(String name, long date) { - // TODO 
Auto-generated method stub - - } - - @Override - public void setHeader(String name, String value) { - // TODO Auto-generated method stub - - } - - @Override - public void addHeader(String name, String value) { - // TODO Auto-generated method stub - - } - - @Override - public void setIntHeader(String name, int value) { - // TODO Auto-generated method stub - - } - - @Override - public void addIntHeader(String name, int value) { - // TODO Auto-generated method stub - - } - - @Override - public void setStatus(int sc) { - // TODO Auto-generated method stub - - } - - @Override - public int getStatus() { - // TODO Auto-generated method stub - return 0; - } - - @Override - public String getHeader(String name) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Collection getHeaders(String name) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Collection getHeaderNames() { - // TODO Auto-generated method stub - return null; - } - - } - - private class DelegatingServletInputStream extends ServletInputStream { - - private final InputStream sourceStream; - - /** - * Create a DelegatingServletInputStream for the given source stream. - * @param sourceStream the source stream (never null) - */ - public DelegatingServletInputStream(InputStream sourceStream) { - this.sourceStream = sourceStream; - } - - public int read() throws IOException { - return this.sourceStream.read(); - } - - public void close() throws IOException { - super.close(); - this.sourceStream.close(); - } - - @Override - public boolean isFinished() { - // TODO Auto-generated method stub - return false; - } - - @Override - public boolean isReady() { - // TODO Auto-generated method stub - return false; - } - - @Override - public void setReadListener(ReadListener listener) { - // TODO Auto-generated method stub - - } - - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/web/controllers/TestScriptBuilderController.java b/src/test/java/org/auscope/portal/server/web/controllers/TestScriptBuilderController.java deleted file mode 100644 index 5bf02cbee..000000000 --- a/src/test/java/org/auscope/portal/server/web/controllers/TestScriptBuilderController.java +++ /dev/null @@ -1,194 +0,0 @@ -package org.auscope.portal.server.web.controllers; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.core.util.FileIOUtil; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.ScmEntryService; -import org.auscope.portal.server.web.service.ScriptBuilderService; -import org.auscope.portal.server.web.service.TemplateLintService; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.springframework.security.access.AccessDeniedException; -import org.springframework.web.servlet.ModelAndView; - -@PrepareForTest({FileIOUtil.class}) -public class TestScriptBuilderController extends PortalTestClass { 
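- // Every collaborator of ScriptBuilderController is mocked; setup() installs the shared stubs and each test adds its own expectations.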
- private ScriptBuilderController controller; - private ScriptBuilderService mockSbService = context.mock(ScriptBuilderService.class); - private ScmEntryService mockScmEntryService = context.mock(ScmEntryService.class); - private TemplateLintService mockTemplateLintService = context.mock(TemplateLintService.class); - private VEGLJobManager mockJobManager = context.mock(VEGLJobManager.class); - private VEGLJob mockJob = context.mock(VEGLJob.class); - private CloudStorageService[] mockCloudStorageServices = new CloudStorageService[] {context.mock(CloudStorageService.class)}; - private CloudComputeService[] mockCloudComputeServices = new CloudComputeService[] {context.mock(CloudComputeService.class)}; - - private PortalUserService mockUserService = context.mock(PortalUserService.class); - - private PortalUser user; - - private static final String VM_SH = "vm.sh"; - private static final String VM_SHUTDOWN_SH = "vm-shutdown.sh"; - - @Before - public void setup() { - // Object Under Test - controller = new ScriptBuilderController(mockSbService, mockUserService, mockJobManager, mockScmEntryService, mockTemplateLintService, mockCloudStorageServices, mockCloudComputeServices, VM_SH, VM_SHUTDOWN_SH/*, nciDetailsDao*/); - user = new PortalUser(); - user.setId("456"); - user.setEmail("user@example.com"); - context.checking(new Expectations() {{ - allowing(mockJob).getEmailAddress();will(returnValue("user@example.com")); - allowing(mockJob).getUser();will(returnValue("user@example.com")); - allowing(mockUserService).getLoggedInUser();will(returnValue(user)); - }}); - } - - /** - * Tests that the saving of script for a given job succeeds. - * @throws Exception - */ - @Test - public void testSaveScript() throws Exception { - String jobId = "1"; - String sourceText = "print 'test'"; - Set solutions = new HashSet<>(); - solutions.add("http://vhirl-dev.csiro.au/scm/solutions/1"); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), user);will(returnValue(mockJob)); - oneOf(mockSbService).saveScript(mockJob, sourceText, user); - oneOf(mockScmEntryService).updateJobForSolution(mockJob, solutions, user); - }}); - - ModelAndView mav = controller.saveScript(jobId, sourceText, solutions); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that the saving of empty script for a given job fails. - */ - @Test - public void testSaveScript_EmptySourceText() { - String jobId = "1"; - String sourceText = ""; - Set solutions = new HashSet<>(); - solutions.add("http://vhirl-dev.csiro.au/scm/solutions/1"); - - ModelAndView mav = controller.saveScript(jobId, sourceText, solutions); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that the saving of script for a given job fails - * when the underlying save script service fails. 
- * @throws Exception - */ - @Test - public void testSaveScript_Exception() throws Exception { - String jobId = "1"; - String sourceText = "print 'test'"; - Set solutions = new HashSet<>(); - solutions.add("http://vhirl-dev.csiro.au/scm/solutions/1"); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), user);will(returnValue(mockJob)); - oneOf(mockSbService).saveScript(mockJob, sourceText, user); - will(throwException(new PortalServiceException(""))); - }}); - - ModelAndView mav = controller.saveScript(jobId, sourceText, solutions); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that the loading of script succeeds. - * @throws Exception - */ - @Test - public void testGetSavedScript() throws Exception { - String jobId = "1"; - String expectedScriptText = "print 'test'"; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), user);will(returnValue(mockJob)); - oneOf(mockSbService).loadScript(mockJob, user); - will(returnValue(expectedScriptText)); - }}); - - ModelAndView mav = controller.getSavedScript(jobId); - Assert.assertTrue((Boolean)mav.getModel().get("success")); - String script = (String)mav.getModel().get("data"); - Assert.assertEquals(expectedScriptText, script); - } - - /** - * Tests that the loading of script fails - * when the underlying load script service fails. - * @throws Exception - */ - @Test - public void testGetSavedScript_Exception() throws Exception { - String jobId = "1"; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), user);will(returnValue(mockJob)); - oneOf(mockSbService).loadScript(mockJob, user); - will(throwException(new PortalServiceException(""))); - }}); - - ModelAndView mav = controller.getSavedScript(jobId); - Assert.assertFalse((Boolean)mav.getModel().get("success")); - } - - /** - * Tests that the loading of script fails - * when the underlying load script service fails. 
- * @throws Exception - */ - @Test(expected=AccessDeniedException.class) - public void testGetSavedScript_BadUser() throws Exception { - final String jobId = "1"; - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getJobById(Integer.parseInt(jobId), user);will(throwException(new AccessDeniedException("error"))); - }}); - - controller.getSavedScript(jobId); - } - - /** - * Tests that the denormalised key/value pairs are turned into an appropriate map - */ - @Test - public void testTemplateParameterParsing() { - String[] keys = new String[] {"apple", "pear", "banana"}; - String[] values = new String[] {"2", "4", "6"}; - String templateName = "example.txt"; - - //The test is that the above keys/values make their way into a valid map - Map expectedMapping = new HashMap<>(); - expectedMapping.put(keys[0], values[0]); - expectedMapping.put(keys[1], values[1]); - expectedMapping.put(keys[2], values[2]); - - context.checking(new Expectations() {{ - oneOf(mockSbService).populateTemplate(with(any(String.class)), with(equal(expectedMapping))); - }}); - - controller.getTemplatedScript(templateName, keys, values); - } -} diff --git a/src/test/java/org/auscope/portal/server/web/security/TestPortalUserDetailsService.java b/src/test/java/org/auscope/portal/server/web/security/TestPortalUserDetailsService.java deleted file mode 100644 index b17c07785..000000000 --- a/src/test/java/org/auscope/portal/server/web/security/TestPortalUserDetailsService.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.auscope.portal.server.web.security; - -import java.util.HashSet; - -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.web.service.PortalUserDetailsService; -import org.junit.Assert; -import org.junit.Test; - -public class TestPortalUserDetailsService extends PortalTestClass { - - private PortalUserDetailsService userDetailsService = new PortalUserDetailsService("TEST_DEFAULT"); - - /** - * Throwaway test to ensure that we get slightly random results (i.e. - noone completely stuffed the implementation) that are valid from Amazon's point of view. 
- */ - @Test - public void testValidBucketNameGeneration(){ - final int ITERATION_COUNT = 10000; - - HashSet previousNames = new HashSet<>(ITERATION_COUNT); - for (int i = 0; i < ITERATION_COUNT; i++) { - String bucketName = userDetailsService.generateRandomBucketName(); - - Assert.assertNotNull(bucketName); - Assert.assertTrue("Bucket name too long", bucketName.length() < 64); - Assert.assertTrue("Bucket name too short", bucketName.length() >= 6); - Assert.assertFalse(previousNames.contains(bucketName)); - Assert.assertTrue(bucketName.matches("[a-z0-9\\-]*")); - - previousNames.add(bucketName); - } - } -} diff --git a/src/test/java/org/auscope/portal/server/web/service/ANVGLProvenanceServiceTest.java b/src/test/java/org/auscope/portal/server/web/service/ANVGLProvenanceServiceTest.java deleted file mode 100644 index f4de09e4a..000000000 --- a/src/test/java/org/auscope/portal/server/web/service/ANVGLProvenanceServiceTest.java +++ /dev/null @@ -1,265 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.File; -import java.io.FileInputStream; -import java.io.InputStream; -import java.io.StringWriter; -import java.net.URI; -import java.net.URL; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.lang.StringUtils; -import org.apache.http.HttpResponse; -import org.apache.http.HttpResponseFactory; -import org.apache.http.HttpVersion; -import org.apache.http.impl.DefaultHttpResponseFactory; -import org.apache.http.message.BasicStatusLine; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; -import org.auscope.portal.core.cloud.CloudFileInformation; -import org.auscope.portal.core.services.cloud.CloudStorageService; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VglDownload; -import org.auscope.portal.server.web.security.PortalUser; -import org.jmock.Expectations; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; - -import au.csiro.promsclient.Activity; -import au.csiro.promsclient.Entity; -import au.csiro.promsclient.ExternalReport; -import au.csiro.promsclient.ProvenanceReporter; -import org.junit.Assert; - -public class ANVGLProvenanceServiceTest extends PortalTestClass { - VEGLJob preparedJob; - final String serverURL = "http://portal-fake.vgl.org"; - final Model plainModel = ModelFactory.createDefaultModel(); - final CloudFileInformation fileInformation = context.mock(CloudFileInformation.class); - final int jobID = 1; - final String cloudKey = "cloudKey"; - final String cloudServiceID = "fluffy Cloud"; - final String jobName = "Cool Job"; - final String jobDescription = "Some job I made."; - final String activityFileName = "activity.ttl"; - final String PROMSURI = "http://ec2-54-213-205-234.us-west-2.compute.amazonaws.com/id/report/"; - final String mockUser = "jo@me.com"; - final Logger logger = LogManager.getLogger(ANVGLProvenanceServiceTest.class); - URI mockProfileUrl; - PortalUser mockPortalUser; - - List downloads = new ArrayList<>(); - VEGLJob turtleJob; - - final String initialTurtle = "" + "\n" + - " a ;" + "\n"; - final String initialTurtle2 = ""; - - final String intermediateTurtle = - " a ;" + System.lineSeparator() + - " " + System.lineSeparator() + - " \"activity.ttl\"^^ ;" + System.lineSeparator() + - " " + 
System.lineSeparator() + - " \"http://portal-fake.vgl.org/secure/jobFile.do?jobId=1&key=activity.ttl\"^^ ;" + System.lineSeparator() + - " " + System.lineSeparator() + - " ."; - - final String endedTurtle = ""; - final String serviceTurtle = ""; - - final String file1Turtle = - " a ;" + System.lineSeparator() + - " " + System.lineSeparator() + - " \"http://portal-fake.vgl.org/secure/jobFile.do?jobId=1&key=cloudKey\"^^ ;" + System.lineSeparator() + - " " + System.lineSeparator() + - " ."; - - ANVGLProvenanceService anvglProvenanceService; - final ProvenanceReporter reporter = context.mock(ProvenanceReporter.class); - - @Before - public void setUp() throws Exception { - preparedJob = context.mock(VEGLJob.class); - mockPortalUser = context.mock(PortalUser.class); - final CloudStorageService store = context.mock(CloudStorageService.class); - final CloudStorageService[] storageServices = {store}; - final ANVGLFileStagingService fileServer = context.mock(ANVGLFileStagingService.class); - final File activityFile = File.createTempFile("activity", ".ttl"); - URL turtleURL = getClass().getResource("/turtle.ttl"); - final File activityFile2 = new File(turtleURL.toURI()); - mockProfileUrl = new URI("https://plus.google.com/1"); - - VglDownload download = new VglDownload(1); - download.setUrl("http://portal-uploads.vgl.org/file1?download=true"); - download.setName("file1"); - downloads.add(download); - CloudFileInformation cloudFileInformation = new CloudFileInformation(cloudKey, 0, ""); - CloudFileInformation cloudFileModel = new CloudFileInformation(activityFileName, 0, ""); - final CloudFileInformation[] cloudList = {cloudFileInformation, cloudFileModel}; - - turtleJob = context.mock(VEGLJob.class, "Turtle Mock Job"); - - context.checking(new Expectations() {{ - allowing(preparedJob).getId(); - will(returnValue(jobID)); - allowing(preparedJob).getStorageServiceId(); - will(returnValue(cloudServiceID)); - allowing(preparedJob).getJobDownloads(); - will(returnValue(downloads)); - allowing(preparedJob).getName(); - will(returnValue(jobName)); - allowing(preparedJob).getDescription(); - will(returnValue(jobDescription)); - allowing(preparedJob).getProcessDate(); - will(returnValue(new Date())); - allowing(preparedJob).getUser(); - will(returnValue("foo@test.com")); - allowing(preparedJob).getPromsReportUrl(); - will(returnValue("http://promsurl/id/report")); - allowing(preparedJob).getExecuteDate(); - will(returnValue(new Date())); - - allowing(fileInformation).getCloudKey(); - will(returnValue(cloudKey)); - - allowing(fileServer).createLocalFile(activityFileName, preparedJob); - will(returnValue(activityFile)); - - allowing(store).getId(); - will(returnValue(cloudServiceID)); - allowing(store).listJobFiles(preparedJob); - will(returnValue(cloudList)); - allowing(store).uploadJobFiles(with(any(VEGLJob.class)), with(any(File[].class))); - allowing(store).getJobFile(preparedJob, activityFileName); - will(returnValue(new FileInputStream(activityFile2))); - - allowing(turtleJob).getId(); - will(returnValue(1)); - - allowing(mockPortalUser).getId();will(returnValue(mockUser)); - - HttpResponseFactory factory = new DefaultHttpResponseFactory(); - HttpResponse response = factory.newHttpResponse(new BasicStatusLine(HttpVersion.HTTP_1_1, 200, null), null); - allowing(reporter).postReport(with(any(URI.class)), with(any(ExternalReport.class))); - will(returnValue(response)); - }}); - - anvglProvenanceService = new ANVGLProvenanceService(fileServer, storageServices, "http://mockurl", 
"http://mockreportingsystemuri"); - anvglProvenanceService.setServerURL(serverURL); - } - - @After - public void tearDown() throws Exception { - - } - - @Test - public void testCreateActivity() throws Exception { - String graph = anvglProvenanceService.createActivity(preparedJob, null, mockPortalUser); - - logger.debug("testCreateActivity"); - logger.debug("GRAPH"); - logger.debug(graph.toString()); - logger.debug("INITIALTURTLE"); - logger.debug(initialTurtle); - - logger.debug("DIFF: " + StringUtils.difference(graph, initialTurtle)); - Assert.assertTrue(graph.contains(initialTurtle)); - Assert.assertTrue(graph.contains(serviceTurtle)); - } - - @Test - public void testUploadModel() throws Exception { - anvglProvenanceService.uploadModel(plainModel, preparedJob); - } - - @Test - public void testJobURL() throws Exception { - String url = ANVGLProvenanceService.jobURL(preparedJob, serverURL); - Assert.assertEquals(serverURL + "/secure/getJobObject.do?jobId=1", url); - } - - @Test - public void testOutputURL() throws Exception { - String url = ANVGLProvenanceService.outputURL(preparedJob, fileInformation, serverURL); - Assert.assertEquals(serverURL + "/secure/jobFile.do?jobId=1&key=cloudKey", url); - } - - @Test - public void testCreateEntitiesForInputs() throws Exception { - Set entities = anvglProvenanceService.createEntitiesForInputs(preparedJob, null, mockPortalUser); - Assert.assertNotNull(entities); - Assert.assertEquals(3, entities.size()); - } - - @Test - public void testPost() throws Exception { - Set outputs = new HashSet<>(); - Set usedEntities = new HashSet<>(); - InputStream activityStream = getClass().getResourceAsStream("/activity.ttl"); - Activity activity; - Model model = ModelFactory.createDefaultModel(); - model = model.read(activityStream, - serverURL, - "TURTLE"); - URI activityURI = new URI( - ANVGLProvenanceService.jobURL(turtleJob, serverURL)); - activity = new Activity().setActivityUri(activityURI).setTitle(activityURI.toString()).setFromModel(model); - if (activity != null) { - activity.setEndedAtTime(new Date()); - String outputURL = serverURL + "/secure/jobFile.do?jobId=21&key=job-macgo-bt-everbloom_gmail_com-0000000021/1000_yrRP_hazard_map.png"; - outputs.add(new Entity().setDataUri(new URI(outputURL)).setWasAttributedTo(mockProfileUrl).setTitle("1000_yrRP_hazard_map.png")); - activity.setGeneratedEntities(outputs); - outputURL = serverURL + "/secure/jobFile.do?jobId=21&key=job-macgo-bt-everbloom_gmail_com-0000000021/20_yrRP_hazard_map.png"; - usedEntities.add(new Entity().setDataUri(new URI(outputURL)).setWasAttributedTo(mockProfileUrl).setTitle("20_yrRP_hazard_map.png")); - activity.setUsedEntities(usedEntities); - final ExternalReport report = new ExternalReport() - .setActivity(activity) - .setTitle(jobName) - .setNativeId(Integer.toString(jobID)) - .setReportingSystemUri(new URI(serverURL)) - .setGeneratedAtTime(new Date()); - //final ProvenanceReporter reporter = context.mock(ProvenanceReporter.class); - HttpResponse resp = reporter.postReport(new URI(PROMSURI), report); - Assert.assertTrue((resp.getStatusLine().getStatusCode() == 200 || - resp.getStatusLine().getStatusCode() == 201)); - } - - } - - @Test - public void testSetFromModel() throws Exception { - Set outputs = new HashSet<>(); - InputStream activityStream = getClass().getResourceAsStream("/activity.ttl"); - Activity activity; - Model model = ModelFactory.createDefaultModel(); - model = model.read(activityStream, - serverURL, - "TURTLE"); - activity = new Activity().setActivityUri(new URI( - 
ANVGLProvenanceService.jobURL(turtleJob, serverURL))).setFromModel(model); - if (activity != null) { - activity.setEndedAtTime(new Date()); - String outputURL = serverURL + "/secure/jobFile.do?jobId=21&key=job-macgo-bt-everbloom_gmail_com-0000000021/1000_yrRP_hazard_map.png"; - outputs.add(new Entity().setDataUri(new URI(outputURL)).setWasAttributedTo(mockProfileUrl)); - activity.setGeneratedEntities(outputs); - StringWriter out = new StringWriter(); - activity.getGraph().write(out, "TURTLE", serverURL); - String turtle = out.toString(); - logger.debug(turtle); - - Assert.assertTrue(turtle.contains(initialTurtle.substring(0, 27))); - Assert.assertTrue(turtle.contains(endedTurtle)); - Assert.assertTrue(turtle.contains(outputURL)); - } - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/web/service/TestCloudSubmissionService.java b/src/test/java/org/auscope/portal/server/web/service/TestCloudSubmissionService.java deleted file mode 100644 index 9f5ad1c0a..000000000 --- a/src/test/java/org/auscope/portal/server/web/service/TestCloudSubmissionService.java +++ /dev/null @@ -1,286 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.CloudComputeService; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.service.monitor.VGLJobStatusChangeHandler; -import org.jmock.Expectations; -import org.jmock.Sequence; -import org.jmock.internal.NamedSequence; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class TestCloudSubmissionService extends PortalTestClass { - - private VEGLJobManager mockJobManager = context.mock(VEGLJobManager.class); - private CloudComputeService mockCloudComputeService = context.mock(CloudComputeService.class); - private VGLJobStatusChangeHandler mockVglJobStatusChangeHandler = context.mock(VGLJobStatusChangeHandler.class); - private ScheduledExecutorService executor; - private CloudSubmissionService service; - - - @Before - public void init() { - executor = Executors.newScheduledThreadPool(1); - service = new CloudSubmissionService(executor); - service.setJobManager(mockJobManager); - service.setVglJobStatusChangeHandler(mockVglJobStatusChangeHandler); - } - - @After - public void destroy() throws Exception{ - if (!executor.isTerminated()) { - executor.shutdownNow(); - executor.awaitTermination(2, TimeUnit.SECONDS); - } - } - - - /** - * Tests that job submission succeeds in a best case scenario - * @throws Exception - */ - @Test - public void testJobSubmission() throws Exception { - //Instantiate our job object - final String userDataString = "user-data"; - final String instanceId = "instance-id"; - final VEGLJob job = new VEGLJob(); - job.setId(213); - - job.setStatus(JobBuilderController.STATUS_PROVISION); - - - context.checking(new Expectations() {{ - allowing(mockCloudComputeService).getId();will(returnValue("ccs-id")); - - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(Exception.class))); - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), 
with(any(String.class))); - - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(returnValue(instanceId)); - oneOf(mockJobManager).saveJob(job); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_PENDING, JobBuilderController.STATUS_PROVISION); - }}); - - try { - service.queueSubmission(mockCloudComputeService, job, userDataString); - } finally { - executor.shutdown(); - } - executor.awaitTermination(1000, TimeUnit.MILLISECONDS); - - Assert.assertEquals(instanceId, job.getComputeInstanceId()); - Assert.assertNotNull(job.getSubmitDate()); - } - - /** - * Tests that jobs are updated in lock step with the internal cache. - * @throws Exception - */ - @Test - public void testJobSynchronisation() throws Exception { - //Instantiate our job object - final String userDataString = "user-data"; - final String instanceId = "instance-id"; - final VEGLJob job = new VEGLJob(); - job.setId(213); - - job.setStatus(JobBuilderController.STATUS_PROVISION); - - - context.checking(new Expectations() {{ - allowing(mockCloudComputeService).getId();will(returnValue("ccs-id")); - - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(Exception.class))); - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(String.class))); - - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(returnValue(instanceId)); - oneOf(mockJobManager).saveJob(job);will(delayReturnValue(2000L, null)); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_PENDING, JobBuilderController.STATUS_PROVISION); - }}); - - try { - service.queueSubmission(mockCloudComputeService, job, userDataString); - Thread.sleep(2000L); - Assert.assertFalse("Returned submitting status while updating job status.", service.isSubmitting(job, mockCloudComputeService)); - } finally { - executor.shutdown(); - } - executor.awaitTermination(10000, TimeUnit.MILLISECONDS); - - Assert.assertEquals(instanceId, job.getComputeInstanceId()); - Assert.assertNotNull(job.getSubmitDate()); - } - - /** - * Tests that a submission failure is handled correctly when the compute service throws an exception - * @throws Exception - */ - @Test - public void testJobSubmissionError() throws Exception { - //Instantiate our job object - final String userDataString = "user-data"; - final VEGLJob job = new VEGLJob(); - job.setId(213); - - job.setStatus(JobBuilderController.STATUS_PROVISION); - - context.checking(new Expectations() {{ - allowing(mockCloudComputeService).getId();will(returnValue("ccs-id")); - - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(Exception.class))); - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(String.class))); - - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(throwException(new PortalServiceException("error"))); - oneOf(mockJobManager).saveJob(job); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_ERROR, JobBuilderController.STATUS_PROVISION); - }}); - - try { - service.queueSubmission(mockCloudComputeService, job, userDataString); - } finally { - executor.shutdown(); - } - executor.awaitTermination(1000, TimeUnit.MILLISECONDS); - - Assert.assertNull(job.getComputeInstanceId()); - Assert.assertNull(job.getSubmitDate()); - } - - /** - * Tests that job submission is treated as an error when the compute service returns no instance ID - * @throws Exception - */ - @Test - public void 
testJobSubmission_NoInstanceId() throws Exception { - //Instantiate our job object - final String userDataString = "user-data"; - final VEGLJob job = new VEGLJob(); - job.setId(213); - - job.setStatus(JobBuilderController.STATUS_PROVISION); - - context.checking(new Expectations() {{ - allowing(mockCloudComputeService).getId();will(returnValue("ccs-id")); - - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(Exception.class))); - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(String.class))); - - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(returnValue(null)); - oneOf(mockJobManager).saveJob(job); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_ERROR, JobBuilderController.STATUS_PROVISION); - }}); - - try { - service.queueSubmission(mockCloudComputeService, job, userDataString); - } finally { - executor.shutdown(); - } - executor.awaitTermination(1000, TimeUnit.MILLISECONDS); - - Assert.assertNull(job.getComputeInstanceId()); - Assert.assertNull(job.getSubmitDate()); - } - - /** - * Tests that a job is queued and then submitted correctly when the first attempt fails with a quota-exceeded error - * @throws Exception - */ - @Test - public void testJobSubmissionWithQueue() throws Exception { - //Instantiate our job object - final String userDataString = "user-data"; - final String instanceId = "instance-id"; - final VEGLJob job = new VEGLJob(); - job.setId(213); - - job.setStatus(JobBuilderController.STATUS_PROVISION); - - service.setQuotaResubmitTime(500L); - service.setQuotaResubmitUnits(TimeUnit.MILLISECONDS); - - Sequence sequence = new NamedSequence("queue to pending sequence"); - - context.checking(new Expectations() {{ - allowing(mockCloudComputeService).getId();will(returnValue("ccs-id")); - - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(Exception.class))); - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(String.class))); - - //Our main execution sequence goes - submit - Quota Full - In Queue - submit - pending - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(throwException(new PortalServiceException("Some random error","Some error correction with Quota exceeded")));inSequence(sequence); - oneOf(mockJobManager).saveJob(job);inSequence(sequence); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_INQUEUE, JobBuilderController.STATUS_PROVISION);inSequence(sequence); - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(returnValue(instanceId));inSequence(sequence); - oneOf(mockJobManager).saveJob(job);inSequence(sequence); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_PENDING, JobBuilderController.STATUS_INQUEUE);inSequence(sequence); - }}); - - try { - service.queueSubmission(mockCloudComputeService, job, userDataString); - Assert.assertTrue(service.isSubmitting(job, mockCloudComputeService)); - Thread.sleep(1000L); - } finally { - executor.shutdown(); - } - executor.awaitTermination(5000, TimeUnit.MILLISECONDS); - - Assert.assertEquals(instanceId, job.getComputeInstanceId()); - Assert.assertNotNull(job.getSubmitDate()); - } - - /** - * Tests that submission errors are handled correctly when the first attempt was queued due to a quota error - * @throws Exception - */ - @Test - public void testJobSubmissionWithQueueError() throws Exception { - //Instantiate 
our job object - final String userDataString = "user-data"; - final VEGLJob job = new VEGLJob(); - job.setId(213); - - job.setStatus(JobBuilderController.STATUS_PROVISION); - - service.setQuotaResubmitTime(500L); - service.setQuotaResubmitUnits(TimeUnit.MILLISECONDS); - - Sequence sequence = new NamedSequence("queue to error sequence"); - - context.checking(new Expectations() {{ - allowing(mockCloudComputeService).getId();will(returnValue("ccs-id")); - - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(Exception.class))); - allowing(mockJobManager).createJobAuditTrail(with(any(String.class)), with(job), with(any(String.class))); - - //Our main execution sequence goes - submit - Quota Full - In Queue - submit - error - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(throwException(new PortalServiceException("Some random error","Some error correction with Quota exceeded")));inSequence(sequence); - oneOf(mockJobManager).saveJob(job);inSequence(sequence); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_INQUEUE, JobBuilderController.STATUS_PROVISION);inSequence(sequence); - oneOf(mockCloudComputeService).executeJob(with(job), with(userDataString));will(throwException(new PortalServiceException("error")));inSequence(sequence); - oneOf(mockJobManager).saveJob(job);inSequence(sequence); - oneOf(mockVglJobStatusChangeHandler).handleStatusChange(job, JobBuilderController.STATUS_ERROR, JobBuilderController.STATUS_INQUEUE);inSequence(sequence); - }}); - - try { - service.queueSubmission(mockCloudComputeService, job, userDataString); - Assert.assertTrue(service.isSubmitting(job, mockCloudComputeService)); - Thread.sleep(1000L); - } finally { - executor.shutdown(); - } - executor.awaitTermination(5000, TimeUnit.MILLISECONDS); - - Assert.assertNull(job.getComputeInstanceId()); - Assert.assertNull(job.getSubmitDate()); - } -} diff --git a/src/test/java/org/auscope/portal/server/web/service/TestScriptBuilderService.java b/src/test/java/org/auscope/portal/server/web/service/TestScriptBuilderService.java deleted file mode 100644 index 617e4fe69..000000000 --- a/src/test/java/org/auscope/portal/server/web/service/TestScriptBuilderService.java +++ /dev/null @@ -1,146 +0,0 @@ -package org.auscope.portal.server.web.service; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.util.HashMap; -import java.util.Map; - -import org.auscope.portal.core.services.PortalServiceException; -import org.auscope.portal.core.services.cloud.FileStagingService; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.web.security.PortalUser; -import org.jmock.Expectations; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -/** - * Unit tests for ScriptBuilderService - * @author Josh Vote - * @author Richard Goh - */ -public class TestScriptBuilderService extends PortalTestClass { - private ScriptBuilderService service; - private FileStagingService mockFileStagingService = context.mock(FileStagingService.class); - private VEGLJob mockJob = context.mock(VEGLJob.class); - - @Before - public void init() { - service = new ScriptBuilderService(mockFileStagingService); - } - - /** - * Tests script saving calls appropriate dependencies - * @throws Exception - */ - @Test - public void testSaveScript() throws Exception { - String script = "#a pretend script\n"; - PortalUser user = 
new PortalUser(); - ByteArrayOutputStream bos = new ByteArrayOutputStream(4096); - - context.checking(new Expectations() {{ - oneOf(mockFileStagingService).writeFile(mockJob, ScriptBuilderService.SCRIPT_FILE_NAME); - will(returnValue(bos)); - }}); - - service.saveScript(mockJob, script, user); - String actual = new String(bos.toByteArray()); - Assert.assertEquals(script, actual); - } - - @Test(expected=PortalServiceException.class) - public void testSaveScript_Exception() throws Exception { - String script = "#a pretend script\n"; - PortalUser user = new PortalUser(); - - context.checking(new Expectations() { - { - oneOf(mockFileStagingService).writeFile(mockJob, - ScriptBuilderService.SCRIPT_FILE_NAME); - will(throwException(new Exception())); - } - }); - - service.saveScript(mockJob, script, user); - } - - /** - * Tests the script loading success scenario - */ - @Test - public void testLoadScript() throws Exception { - String script = "#a pretend script\n"; - PortalUser user = new PortalUser(); - - context.checking(new Expectations() {{ - oneOf(mockFileStagingService).readFile(mockJob, ScriptBuilderService.SCRIPT_FILE_NAME); - will(returnValue(new ByteArrayInputStream(script.getBytes()))); - }}); - - String actualScript = service.loadScript(mockJob, user); - Assert.assertEquals(script, actualScript); - } - - /** - * Tests to ensure an empty string is returned when the script file doesn't exist - */ - @Test - public void testLoadEmptyScript() throws Exception { - PortalUser user = new PortalUser(); - - context.checking(new Expectations() {{ - oneOf(mockFileStagingService).readFile(mockJob, ScriptBuilderService.SCRIPT_FILE_NAME); - will(returnValue(null)); - }}); - - String actualScript = service.loadScript(mockJob, user); - Assert.assertEquals("", actualScript); - } - - /** - * Tests to ensure an exception is handled properly - */ - @Test(expected=PortalServiceException.class) - public void testLoadScriptError() throws Exception { - PortalUser user = new PortalUser(); - - context.checking(new Expectations() {{ - oneOf(mockFileStagingService).readFile(mockJob, ScriptBuilderService.SCRIPT_FILE_NAME); - will(throwException(new PortalServiceException("Test load script exception"))); - }}); - - service.loadScript(mockJob, user); - } - - /** - * Tests templating on a valid template string - */ - @Test - public void testTemplating() { - String template = "I have ${dog-amount} dogs and ${cat-amount} cats"; - Map values = new HashMap<>(); - values.put("dog-amount", 2); - values.put("cat-amount", "3"); - values.put("bird-amount", 4); - - String result = service.populateTemplate(template, values); - - Assert.assertEquals("I have 2 dogs and 3 cats", result); - } - - /** - * Tests templating when a required template value is missing - */ - @Test - public void testTemplating_BadTemplate() { - String template = "I have ${dog-amount} dogs and ${cat-amount} cats"; - Map values = new HashMap<>(); - values.put("dog-amount", 2); - values.put("bird-amount", 4); - - String result = service.populateTemplate(template, values); - Assert.assertEquals("I have 2 dogs and ${cat-amount} cats", result); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/web/service/monitor/TestVGLJobStatusChangeHandler.java b/src/test/java/org/auscope/portal/server/web/service/monitor/TestVGLJobStatusChangeHandler.java deleted file mode 100644 index 293ebdef1..000000000 --- a/src/test/java/org/auscope/portal/server/web/service/monitor/TestVGLJobStatusChangeHandler.java +++ /dev/null @@ -1,114 +0,0 @@ -package 
org.auscope.portal.server.web.service.monitor; - -import java.util.Date; - -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.vegl.VGLJobStatusAndLogReader; -import org.auscope.portal.server.vegl.mail.JobMailSender; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.service.ANVGLProvenanceService; -import org.jmock.Expectations; -import org.junit.Before; -import org.junit.Test; - -/** - * Unit tests for VGLJobStatusChangeHandler. - * - * @author Richard Goh - */ -public class TestVGLJobStatusChangeHandler extends PortalTestClass { - private VGLJobStatusChangeHandler handler; - private VEGLJobManager mockJobManager; - private JobMailSender mockJobMailSender; - private VEGLJob mockJob; - private VGLJobStatusAndLogReader mockVGLJobStatusAndLogReader; - - private ANVGLProvenanceService mockANVGLProvenanceService; - - @Before - public void init() { - //Mock objects required for the unit tests - mockJobManager = context.mock(VEGLJobManager.class); - mockJobMailSender = context.mock(JobMailSender.class); - mockJob = context.mock(VEGLJob.class); - mockVGLJobStatusAndLogReader = context.mock(VGLJobStatusAndLogReader.class); - mockANVGLProvenanceService = context.mock(ANVGLProvenanceService.class); - - //This is the component under test - handler = new VGLJobStatusChangeHandler(mockJobManager, - mockJobMailSender,mockVGLJobStatusAndLogReader, mockANVGLProvenanceService); - } - - /** - * Tests that the handle status change method does nothing - * when the job being processed has an unsubmitted status. - */ - @Test - public void testHandleStatusChange_UnsubmittedJob() { - final String oldStatus = JobBuilderController.STATUS_PENDING; - final String newStatus = JobBuilderController.STATUS_UNSUBMITTED; - handler.handleStatusChange(mockJob, newStatus, oldStatus); - } - - /** - * Tests that the handle status change method succeeds - * for a completed job with email notification disabled. - */ - @Test - public void testHandleStatusChange_JobDoneAndEmailNotificationDisabled() { - final int jobId = 123; - final String oldStatus = JobBuilderController.STATUS_PENDING; - final String newStatus = JobBuilderController.STATUS_DONE; - final String timeLog = "Time is 10"; - final String executeDateLog = "01/01/2016T10:30:00"; - - context.checking(new Expectations() {{ - allowing(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getEmailNotification();will(returnValue(false)); - oneOf(mockJob).setProcessDate(with(any(Date.class))); - oneOf(mockJob).setExecuteDate(with(any(Date.class))); - oneOf(mockVGLJobStatusAndLogReader).getSectionedLog(mockJob, "Time");will(returnValue(timeLog)); - oneOf(mockVGLJobStatusAndLogReader).getSectionedLog(mockJob, "Execute");will(returnValue(executeDateLog)); - oneOf(mockJob).setProcessTimeLog(timeLog); - oneOf(mockJob).setStatus(newStatus); - allowing(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(oldStatus, mockJob, "Job status updated."); - oneOf(mockANVGLProvenanceService).createEntitiesForOutputs(mockJob);will(returnValue("")); - }}); - - handler.handleStatusChange(mockJob, newStatus, oldStatus); - } - - /** - * Tests that the handle status change method succeeds - * for a completed job with email notification enabled. 
- */ - @Test - public void testHandleStatusChange_JobDoneAndEmailNotificationEnabled() { - final int jobId = 123; - final String oldStatus = JobBuilderController.STATUS_PENDING; - final String newStatus = JobBuilderController.STATUS_DONE; - final String timeLog = "Time is 10"; - final String executeDateLog = "01/01/2016T10:30:00"; - - context.checking(new Expectations() {{ - allowing(mockJob).getId();will(returnValue(jobId)); - oneOf(mockJob).getEmailNotification();will(returnValue(true)); - oneOf(mockJob).setProcessDate(with(any(Date.class))); - oneOf(mockJob).setExecuteDate(with(any(Date.class))); - oneOf(mockVGLJobStatusAndLogReader).getSectionedLog(mockJob, "Time");will(returnValue(timeLog)); - oneOf(mockVGLJobStatusAndLogReader).getSectionedLog(mockJob, "Execute");will(returnValue(executeDateLog)); - oneOf(mockJob).setProcessTimeLog(timeLog); - oneOf(mockJob).setStatus(newStatus); - allowing(mockJobManager).saveJob(mockJob); - oneOf(mockJobManager).createJobAuditTrail(oldStatus, mockJob, "Job status updated."); - oneOf(mockJobMailSender).sendMail(mockJob); - - oneOf(mockANVGLProvenanceService).createEntitiesForOutputs(mockJob);will(returnValue("")); - }}); - - handler.handleStatusChange(mockJob, newStatus, oldStatus); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/server/web/service/monitor/TestVGLJobStatusMonitor.java b/src/test/java/org/auscope/portal/server/web/service/monitor/TestVGLJobStatusMonitor.java deleted file mode 100644 index e1689c419..000000000 --- a/src/test/java/org/auscope/portal/server/web/service/monitor/TestVGLJobStatusMonitor.java +++ /dev/null @@ -1,106 +0,0 @@ -package org.auscope.portal.server.web.service.monitor; - -import java.util.Arrays; -import java.util.List; - -import org.auscope.portal.core.services.cloud.monitor.JobStatusException; -import org.auscope.portal.core.services.cloud.monitor.JobStatusMonitor; -import org.auscope.portal.core.test.PortalTestClass; -import org.auscope.portal.server.vegl.VEGLJob; -import org.auscope.portal.server.vegl.VEGLJobManager; -import org.auscope.portal.server.web.controllers.JobBuilderController; -import org.auscope.portal.server.web.security.PortalUser; -import org.auscope.portal.server.web.service.PortalUserService; -import org.auscope.portal.server.web.service.NCIDetailsService; -import org.jmock.Expectations; -import org.junit.Before; -import org.junit.Test; -import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; - -/** - * Unit tests for VGLJobStatusMonitor. 
- * - * @author Richard Goh - */ -public class TestVGLJobStatusMonitor extends PortalTestClass { - private VGLJobStatusMonitor monitor; - private JobExecutionContext mockJobExecCtx; - private VEGLJobManager mockJobManager; - private JobStatusMonitor mockJobStatusMonitor; - private PortalUserService mockUserService; - private NCIDetailsService mockNciService; - - @Before - public void init() { - //Mock objects required for the unit tests - mockJobExecCtx = context.mock(JobExecutionContext.class); - mockJobManager = context.mock(VEGLJobManager.class); - mockJobStatusMonitor = context.mock(JobStatusMonitor.class); - mockUserService = context.mock(PortalUserService.class); - mockNciService = context.mock(NCIDetailsService.class); - - //Component under test - monitor = new VGLJobStatusMonitor(); - monitor.setJobManager(mockJobManager); - monitor.setJobStatusMonitor(mockJobStatusMonitor); - monitor.setJobUserService(mockUserService); - monitor.setNciDetailsService(mockNciService); - } - - /** - * Tests that the execution of the VGLJobStatusMonitor task - * runs as expected. - * @throws Exception - */ - @Test - public void testExecuteInternal() throws Exception { - final VEGLJob job1 = new VEGLJob(); - job1.setId(1); - job1.setStatus(JobBuilderController.STATUS_PENDING); - - final VEGLJob job2 = new VEGLJob(); - job2.setId(2); - job2.setStatus(JobBuilderController.STATUS_ACTIVE); - - final List pendingActiveJobs = Arrays.asList(job1, job2); - final PortalUser user = new PortalUser(); - - context.checking(new Expectations() {{ - oneOf(mockJobManager).getPendingOrActiveJobs();will(returnValue(pendingActiveJobs)); - allowing(mockUserService).getByEmail(null); will(returnValue(user)); - allowing(mockNciService).getByUser(user); will(returnValue(null)); - oneOf(mockJobStatusMonitor).statusUpdate(pendingActiveJobs); - }}); - - monitor.executeInternal(mockJobExecCtx); - } - - /** - * Tests that an exception thrown by the job status change handler - * is correctly wrapped in a JobExecutionException - * @throws Exception - */ - @Test(expected=JobExecutionException.class) - public void testExecuteInternal_Exception() throws Exception { - final VEGLJob job1 = new VEGLJob(); - job1.setId(1); - job1.setStatus(JobBuilderController.STATUS_PENDING); - - final VEGLJob job2 = new VEGLJob(); - job2.setId(2); - job2.setStatus(JobBuilderController.STATUS_ACTIVE); - - final List pendingActiveJobs = Arrays.asList(job1, job2); - final PortalUser user = new PortalUser(); - - context.checking(new Expectations() {{ - allowing(mockJobManager).getPendingOrActiveJobs();will(returnValue(pendingActiveJobs)); - allowing(mockUserService).getByEmail(null); will(returnValue(user)); - allowing(mockNciService).getByUser(user); will(returnValue(null)); - oneOf(mockJobStatusMonitor).statusUpdate(pendingActiveJobs);will(throwException(new JobStatusException(new Exception(), job1))); - }}); - - monitor.executeInternal(mockJobExecCtx); - } -} \ No newline at end of file diff --git a/src/test/java/org/auscope/portal/ui/SearchLayerTest.java b/src/test/java/org/auscope/portal/ui/SearchLayerTest.java deleted file mode 100644 index 22912147b..000000000 --- a/src/test/java/org/auscope/portal/ui/SearchLayerTest.java +++ /dev/null @@ -1,119 +0,0 @@ -package org.auscope.portal.ui; - -import static org.junit.Assert.assertEquals; - -import java.net.MalformedURLException; -import java.time.Duration; -import java.util.List; - -import org.openqa.selenium.By; -import org.openqa.selenium.Keys; -import org.openqa.selenium.WebDriver; -import org.openqa.selenium.WebElement; -import 
org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Parameters; -import org.testng.annotations.Test; - -/** - * Test search layer panel by keyword in main dev portal. - * - * @author Rini Angreani - * - */ -public class SearchLayerTest { - protected WebDriver driver = null; - protected String portal_url = null; - - @Parameters({ "browser", "version", "port", "portal_url" }) - @BeforeClass - public void setup(String browser, String version, String port, String portal) - throws MalformedURLException { - this.driver = SeleniumTestUtil.getWebDriver(browser, version, port); - this.portal_url = portal; - } - - @AfterClass - public void tearDown() { - driver.quit(); - } - - @BeforeMethod - public void openPage() { - // open portal - driver.get(portal_url); - } - - @Test - /** - * Test typing keyword in the search box and pressing enter. - */ - public void testSearchAndEnter() { - // search box - WebElement searchBox = driver.findElement(By.id("hh-searchfield-Featured-inputEl")); - - // type "tenement" in search - searchBox.sendKeys("tenement"); - searchBox.sendKeys(Keys.ENTER); - - checkTenementLayer(); - - } - - public void checkTenementLayer() { - // tenement layer header - List results = driver.findElements(By.xpath("//div[text() = 'Tenements (1 item)']")); - - // Verify that there is 1 match - assertEquals(1, results.size()); - - WebElement layerGroup = results.get(0); - // Find that toggle button next to the panel header - WebElement toggle = layerGroup.findElement(By.xpath("../following-sibling::div/img[@class='x-tool-img x-tool-expand-bottom']")); - - // expand the toggle - toggle.click(); - - // Work out the generated id i.e. 6530 - // First get the recordgrouppanel - //