Skip to content

Commit

Permalink
Remove dependency on rdf from storage (#4131)
Browse files Browse the repository at this point in the history
* Remove dependency on rdf from storage

* don't compile with java 1.8

* no, it's the other ones which shouldn't use 1.8

* don't trigger storage build if rdf/kernel change

* reduce storage scoverage threshold
  • Loading branch information
shinyhappydan authored Aug 4, 2023
1 parent 3e128a4 commit 4186060
Show file tree
Hide file tree
Showing 13 changed files with 146 additions and 61 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/ci-storage.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@ name: Storage Integration Service
on:
pull_request:
paths:
- 'delta/kernel/**'
- 'delta/rdf/**'
- 'storage/**'
- 'build.sbt'
- 'project/**'
Expand Down
10 changes: 5 additions & 5 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ val akkaCorsVersion = "1.2.0"
val akkaVersion = "2.6.21"
val alpakkaVersion = "3.0.4"
val apacheCompressVersion = "1.23.0"
val apacheIoVersion = "1.3.2"
val awsSdkVersion = "2.17.184"
val byteBuddyAgentVersion = "1.10.17"
val betterMonadicForVersion = "0.3.1"
Expand Down Expand Up @@ -76,6 +77,7 @@ lazy val alpakkaFile = "com.lightbend.akka" %% "akka-stream-alp
lazy val alpakkaSse = "com.lightbend.akka" %% "akka-stream-alpakka-sse" % alpakkaVersion
lazy val alpakkaS3 = "com.lightbend.akka" %% "akka-stream-alpakka-s3" % alpakkaVersion
lazy val apacheCompress = "org.apache.commons" % "commons-compress" % apacheCompressVersion
lazy val apacheIo = "org.apache.commons" % "commons-io" % apacheIoVersion
lazy val awsSdk = "software.amazon.awssdk" % "s3" % awsSdkVersion
lazy val betterMonadicFor = "com.olegpy" %% "better-monadic-for" % betterMonadicForVersion
lazy val byteBuddyAgent = "net.bytebuddy" % "byte-buddy-agent" % byteBuddyAgentVersion
Expand Down Expand Up @@ -203,7 +205,6 @@ lazy val kernel = project
.settings(name := "delta-kernel", moduleName := "delta-kernel")
.settings(shared, compilation, coverage, release, assertJavaVersion)
.settings(
javaSpecificationVersion := "1.8",
libraryDependencies ++= Seq(
caffeine,
catsRetry,
Expand All @@ -227,7 +228,6 @@ lazy val testkit = project
.settings(name := "delta-testkit", moduleName := "delta-testkit")
.settings(shared, compilation, coverage, release, assertJavaVersion)
.settings(
javaSpecificationVersion := "1.8",
coverageMinimumStmtTotal := 0,
libraryDependencies ++= Seq(
akkaActorTyped, // Needed to create Uri
Expand Down Expand Up @@ -282,7 +282,6 @@ lazy val rdf = project
moduleName := "delta-rdf"
)
.settings(
javaSpecificationVersion := "1.8",
libraryDependencies ++= Seq(
akkaActorTyped, // Needed to create Uri
akkaHttpCore,
Expand Down Expand Up @@ -723,8 +722,7 @@ lazy val cargo = taskKey[(File, String)]("Run Cargo to build 'nexus-fixer'")
lazy val storage = project
.in(file("storage"))
.enablePlugins(UniversalPlugin, JavaAppPackaging, JavaAgent, DockerPlugin, BuildInfoPlugin)
.settings(shared, compilation, assertJavaVersion, kamonSettings, storageAssemblySettings, coverage, release, servicePackaging)
.dependsOn(rdf)
.settings(shared, compilation, assertJavaVersion, kamonSettings, storageAssemblySettings, coverage, release, servicePackaging, coverageMinimumStmtTotal := 75)
.settings(cargo := {
import scala.sys.process._

Expand All @@ -747,6 +745,7 @@ lazy val storage = project
javaSpecificationVersion := "1.8",
libraryDependencies ++= Seq(
apacheCompress,
apacheIo,
akkaHttp,
akkaHttpCirce,
akkaStream,
Expand All @@ -758,6 +757,7 @@ lazy val storage = project
circeGenericExtras,
logback,
monixEval,
pureconfig,
scalaLogging,
akkaHttpTestKit % Test,
akkaTestKit % Test,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
package ch.epfl.bluebrain.nexus.storage

import akka.http.scaladsl.model.{ContentType, Uri}
import ch.epfl.bluebrain.nexus.delta.rdf.implicits._
import ch.epfl.bluebrain.nexus.storage.config.Contexts.resourceCtxIri
import scala.annotation.nowarn
import ch.epfl.bluebrain.nexus.storage.jsonld.JsonLdContext.addContext
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto._
import io.circe.{Decoder, Encoder}

import scala.annotation.nowarn
import scala.util.Try

// $COVERAGE-OFF$
Expand Down Expand Up @@ -61,7 +61,7 @@ object File {
Decoder.decodeString.emap(ContentType.parse(_).left.map(_.mkString("\n")))

implicit val fileAttrEncoder: Encoder[FileAttributes] =
deriveConfiguredEncoder[FileAttributes].mapJson(_.addContext(resourceCtxIri))
deriveConfiguredEncoder[FileAttributes].mapJson(addContext(_, resourceCtxIri))
implicit val fileAttrDecoder: Decoder[FileAttributes] = deriveConfiguredDecoder[FileAttributes]
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ package ch.epfl.bluebrain.nexus.storage
import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.model.{ContentTypeRange, HttpEntity}
import ch.epfl.bluebrain.nexus.delta.rdf.RdfMediaTypes
import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.{sortKeys, OrderedKeys}
import ch.epfl.bluebrain.nexus.storage.MediaTypes.`application/ld+json`
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import io.circe.syntax._
import io.circe.{Encoder, Json, JsonObject, Printer}
Expand All @@ -19,7 +19,7 @@ import scala.collection.immutable.Seq
trait JsonLdCirceSupport extends FailFastCirceSupport {

override def unmarshallerContentTypes: Seq[ContentTypeRange] =
List(`application/json`, RdfMediaTypes.`application/ld+json`)
List(`application/json`, `application/ld+json`)

/**
* `A` => HTTP entity
Expand All @@ -46,8 +46,8 @@ trait JsonLdCirceSupport extends FailFastCirceSupport {
printer: Printer = Printer.noSpaces.copy(dropNullValues = true),
keys: OrderedKeys = OrderedKeys()
): ToEntityMarshaller[Json] =
Marshaller.withFixedContentType(RdfMediaTypes.`application/ld+json`) { json =>
HttpEntity(RdfMediaTypes.`application/ld+json`, printer.print(sortKeys(json)))
Marshaller.withFixedContentType(`application/ld+json`) { json =>
HttpEntity(`application/ld+json`, printer.print(sortKeys(json)))
}

}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
package ch.epfl.bluebrain.nexus.storage

import akka.http.scaladsl.model.MediaType
import akka.http.scaladsl.model.HttpCharsets.`UTF-8`

object MediaTypes {

  /**
    * Media type for JSON-LD payloads (`application/ld+json`), registered with a fixed
    * UTF-8 charset and the `jsonld` file extension.
    */
  final val `application/ld+json`: MediaType.WithFixedCharset = {
    val subType       = "ld+json"
    val fileExtension = "jsonld"
    MediaType.applicationWithFixedCharset(subType, `UTF-8`, fileExtension)
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,12 @@ import java.net.URLDecoder
import java.nio.file.StandardCopyOption._
import java.nio.file.{Files, Path, Paths}
import java.security.MessageDigest

import akka.http.scaladsl.model.Uri
import akka.stream.Materializer
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl.{FileIO, Keep, Sink}
import cats.effect.Effect
import cats.implicits._
import ch.epfl.bluebrain.nexus.delta.rdf.syntax._
import ch.epfl.bluebrain.nexus.storage.File._
import ch.epfl.bluebrain.nexus.storage.Rejection.{PathAlreadyExists, PathContainsLinks, PathNotFound}
import ch.epfl.bluebrain.nexus.storage.StorageError.{InternalError, PathInvalid, PermissionsFixingFailed}
Expand All @@ -22,6 +20,7 @@ import ch.epfl.bluebrain.nexus.storage.attributes.AttributesCache
import ch.epfl.bluebrain.nexus.storage.attributes.AttributesComputation._
import ch.epfl.bluebrain.nexus.storage.config.AppConfig.{DigestConfig, StorageConfig}

import scala.annotation.tailrec
import scala.concurrent.{ExecutionContext, Future}
import scala.sys.process._
import scala.util.{Success, Try}
Expand Down Expand Up @@ -116,6 +115,21 @@ trait Storages[F[_], Source] {

object Storages {

/**
  * Checks whether ''parent'' is a proper ancestor of ''target'' in the path hierarchy.
  * E.g. target = /some/my/path with parent = /some yields true, while
  * target = /some/my/path with parent = /other yields false.
  */
private def descendantOf(target: Path, parent: Path): Boolean =
  isAncestorOf(parent, target.getParent)

// Walks up the parent chain of ''candidate'' (element-wise equality, no normalization)
// until ''ancestor'' is found or the root is passed (getParent returns null).
@tailrec
@SuppressWarnings(Array("NullParameter"))
private def isAncestorOf(ancestor: Path, candidate: Path): Boolean =
  if (candidate == null) false
  else if (candidate == ancestor) true
  else isAncestorOf(ancestor, candidate.getParent)

sealed trait BucketExistence
sealed trait PathExistence

Expand Down Expand Up @@ -162,7 +176,7 @@ object Storages {

def pathExists(name: String, relativeFilePath: Uri.Path): PathExistence = {
val path = filePath(name, relativeFilePath)
if (Files.exists(path) && Files.isReadable(path) && path.descendantOf(basePath(name))) PathExists
if (Files.exists(path) && Files.isReadable(path) && descendantOf(path, basePath(name))) PathExists
else PathDoesNotExist
}

Expand All @@ -172,7 +186,7 @@ object Storages {
source: AkkaSource
)(implicit bucketEv: BucketExists, pathEv: PathDoesNotExist): F[FileAttributes] = {
val absFilePath = filePath(name, relativeFilePath)
if (absFilePath.descendantOf(basePath(name)))
if (descendantOf(absFilePath, basePath(name)))
F.fromTry(Try(Files.createDirectories(absFilePath.getParent))) >>
F.fromTry(Try(MessageDigest.getInstance(digestConfig.algorithm))).flatMap { msgDigest =>
source
Expand Down Expand Up @@ -243,9 +257,9 @@ object Storages {
fixPermissions(absSourcePath).flatMap { fixPermsResult =>
if (!Files.exists(absSourcePath))
F.pure(Left(PathNotFound(name, sourceRelativePath)))
else if (!absSourcePath.descendantOf(bucketPath) || absSourcePath.descendantOf(bucketProtectedPath))
else if (!descendantOf(absSourcePath, bucketPath) || descendantOf(absSourcePath, bucketProtectedPath))
F.pure(Left(PathNotFound(name, sourceRelativePath)))
else if (!absDestPath.descendantOf(bucketProtectedPath))
else if (!descendantOf(absDestPath, bucketProtectedPath))
F.raiseError(PathInvalid(name, destRelativePath))
else if (Files.exists(absDestPath))
F.pure(Left(PathAlreadyExists(name, destRelativePath)))
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package ch.epfl.bluebrain.nexus.storage

import akka.http.scaladsl.model.Uri

object UriUtils {

  /**
    * Appends ''segment'' to the end of the path of ''uri'', normalizing slashes so that
    * exactly one separator ends up between the existing path and the new segment.
    * A blank (empty or whitespace-only) segment leaves the uri untouched.
    */
  def addPath(uri: Uri, segment: String): Uri =
    if (segment.trim.isEmpty) uri
    else {
      // (path already ends with '/', segment already starts with '/')
      val mergedPath = (uri.path.endsWithSlash, segment.startsWith("/")) match {
        case (true, true)   => uri.path + segment.drop(1) // both have a slash: drop the duplicate
        case (true, false)  => uri.path + segment         // path supplies the separator
        case (false, true)  => uri.path / segment.drop(1) // '/' operator supplies the separator
        case (false, false) => uri.path / segment         // neither has one: '/' inserts it
      }
      uri.copy(path = mergedPath)
    }
}
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
package ch.epfl.bluebrain.nexus.storage.config

import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
import ch.epfl.bluebrain.nexus.delta.rdf.implicits._
import akka.http.scaladsl.model.Uri

object Contexts {

private val base = "https://bluebrain.github.io/nexus/contexts/"

val errorCtxIri: Iri = iri"${base}error.json"
val resourceCtxIri: Iri = iri"${base}resource.json"
val errorCtxIri: Uri = s"${base}error.json"
val resourceCtxIri: Uri = s"${base}resource.json"

}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ch.epfl.bluebrain.nexus.storage.config

import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.storage.UriUtils.addPath

/**
* Configuration for DeltaClient identities endpoint.
Expand All @@ -13,11 +14,11 @@ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
* the prefix
*/
final case class DeltaClientConfig(
publicIri: Iri,
internalIri: Iri,
publicIri: Uri,
internalIri: Uri,
prefix: String
) {
lazy val baseInternalIri: Iri = internalIri / prefix
lazy val basePublicIri: Iri = publicIri / prefix
lazy val identitiesIri: Iri = baseInternalIri / "identities"
lazy val baseInternalIri: Uri = addPath(internalIri, prefix)
lazy val basePublicIri: Uri = addPath(publicIri, prefix)
lazy val identitiesIri: Uri = addPath(baseInternalIri, "identities")
}
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
package ch.epfl.bluebrain.nexus.storage.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri

import scala.annotation.nowarn
import ch.epfl.bluebrain.nexus.delta.rdf.implicits._
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers._
import pureconfig._
import pureconfig.generic.auto._

import java.nio.file.{Path, Paths}
import scala.annotation.nowarn

/**
* Akka settings extension to expose application configuration. It typically uses the configuration instance of the
Expand All @@ -27,8 +24,6 @@ class Settings(config: Config) extends Extension {
val appConfig: AppConfig = {
implicit val uriConverter: ConfigConvert[Uri] =
ConfigConvert.viaString[Uri](catchReadError(s => Uri(s)), _.toString)
implicit val iriConverter: ConfigConvert[Iri] =
ConfigConvert.viaString[Iri](catchReadError(s => iri"$s"), _.toString)
implicit val pathConverter: ConfigConvert[Path] =
ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)
ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
package ch.epfl.bluebrain.nexus.storage.jsonld

import akka.http.scaladsl.model.Uri
import io.circe.Json

object JsonLdContext {

  // JSON-LD keyword names used when manipulating documents.
  object keywords {
    val context = "@context"
  }

  /**
    * Adds a context Iri to an existing @context, or creates an @context with the Iri as a value.
    *
    * Merge rules (guard order matters: the emptiness checks must run before the
    * containment checks so an empty @context is simply replaced, not wrapped):
    *   - payload is not a JSON object                 -> returned unchanged
    *   - missing / empty object|array|string @context -> @context set to the iri string
    *   - non-empty object @context without the iri    -> wrapped into an array [ctx, iri]
    *   - non-empty array @context without the iri     -> iri appended to the array
    *   - non-empty string @context different from iri -> wrapped into an array [ctx, iri]
    *   - otherwise (iri already present, or an unsupported @context shape) -> unchanged
    */
  def addContext(json: Json, contextIri: Uri): Json = {
    // The context iri rendered as a JSON string; used both for insertion and duplicate detection.
    val jUriString = Json.fromString(contextIri.toString)

    json.asObject match {
      case Some(obj) =>
        val updated = obj(keywords.context) match {
          case None           => obj.add(keywords.context, jUriString)
          case Some(ctxValue) =>
            // Inspect the existing @context as object / array / string (at most one is Some).
            (ctxValue.asObject, ctxValue.asArray, ctxValue.asString) match {
              case (Some(co), _, _) if co.isEmpty => obj.add(keywords.context, jUriString)
              case (_, Some(ca), _) if ca.isEmpty => obj.add(keywords.context, jUriString)
              case (_, _, Some(cs)) if cs.isEmpty => obj.add(keywords.context, jUriString)
              // Non-empty object context lacking the iri: keep it and pair it with the iri.
              case (Some(co), _, _) if !co.values.exists(_ == jUriString) =>
                obj.add(keywords.context, Json.arr(ctxValue, jUriString))
              // Non-empty array context lacking the iri: append it.
              case (_, Some(ca), _) if !ca.contains(jUriString) =>
                obj.add(keywords.context, Json.fromValues(ca :+ jUriString))
              // Non-empty string context different from the iri: pair them in an array.
              case (_, _, Some(cs)) if cs != contextIri.toString =>
                obj.add(keywords.context, Json.arr(ctxValue, jUriString))
              // Iri already present, or a @context of an unsupported shape: leave untouched.
              case _ => obj
            }
        }
        Json.fromJsonObject(updated)
      // Non-object payloads (arrays, literals) cannot carry an @context.
      case None => json
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import akka.http.scaladsl.marshalling.GenericMarshallers.eitherMarshaller
import akka.http.scaladsl.marshalling._
import akka.http.scaladsl.model.MediaTypes._
import akka.http.scaladsl.model._
import ch.epfl.bluebrain.nexus.delta.rdf.RdfMediaTypes._
import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.sortKeys
import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.OrderedKeys
import ch.epfl.bluebrain.nexus.storage.Rejection
Expand All @@ -14,6 +13,7 @@ import io.circe._
import io.circe.syntax._
import monix.eval.Task
import monix.execution.Scheduler
import ch.epfl.bluebrain.nexus.storage.MediaTypes.`application/ld+json`

import scala.collection.immutable.Seq
import scala.concurrent.Future
Expand Down
Loading

0 comments on commit 4186060

Please sign in to comment.