Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into fail-on-bogus-fields
Browse files Browse the repository at this point in the history
  • Loading branch information
fsoikin committed Sep 9, 2024
2 parents c092b40 + e79c98b commit 4ceab91
Show file tree
Hide file tree
Showing 13 changed files with 262 additions and 102 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ Other improvements:
that command, not root help.
- a new `spago init --subpackage foo` option to initialize a sub-project in the
current workspace.
- #1110: `spago publish` will now install packages returned by the registry solver
before trying to build with them.
- Spago no longer ignores config fields that it doesn't recognize. This should
help catch typos in field names.

Expand Down
2 changes: 1 addition & 1 deletion core/src/Prelude.purs
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ import Data.Show.Generic (genericShow) as Extra
import Data.Traversable (for, traverse) as Extra
import Data.TraversableWithIndex (forWithIndex) as Extra
import Data.Tuple (Tuple(..), fst, snd) as Extra
import Data.Tuple.Nested ((/\)) as Extra
import Data.Tuple.Nested (type (/\), (/\)) as Extra
import Effect (Effect) as Extra
import Effect.Aff (Aff, Error) as Extra
import Effect.Aff.Class (class MonadAff, liftAff) as Extra
Expand Down
171 changes: 88 additions & 83 deletions src/Spago/Command/Fetch.purs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ module Spago.Command.Fetch
, getTransitiveDeps
, getTransitiveDepsFromRegistry
, getWorkspacePackageDeps
, fetchPackagesToLocalCache
, run
, toAllDependencies
, writeNewLockfile
Expand Down Expand Up @@ -83,7 +84,7 @@ run :: forall a. FetchOpts -> Spago (FetchEnv a) PackageTransitiveDeps
run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
logDebug $ "Requested to install these packages: " <> printJson (CJ.array PackageName.codec) packagesRequestedToInstall

{ workspace: currentWorkspace, offline } <- ask
{ workspace: currentWorkspace } <- ask

let
getPackageConfigPath errorMessageEnd = do
Expand Down Expand Up @@ -192,95 +193,99 @@ run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do

-- then for every package we have we try to download it, and copy it in the local cache
logInfo "Downloading dependencies..."

parallelise $ (flip map) (Map.toUnfoldable depsToFetch :: Array (Tuple PackageName Package)) \(Tuple name package) -> do
let localPackageLocation = Config.getPackageLocation name package
-- first of all, we check if we have the package in the local cache. If so, we don't even do the work
unlessM (FS.exists localPackageLocation) case package of
GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
RegistryVersion v -> do
-- if the version comes from the registry then we have a longer list of things to do
let versionString = Registry.Version.print v
let packageVersion = PackageName.print name <> "@" <> versionString
-- get the metadata for the package, so we have access to the hash and other info
metadata <- Registry.getMetadata name
case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
Left err -> die $ "Couldn't read metadata, reason:\n " <> err
Right versionMetadata -> do
logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
-- then check if we have a tarball cached. If not, download it
let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
FS.mkdirp globalCachePackagePath
-- We need to see if the tarball is there, and if we can decompress it.
-- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
-- By checking that it's broken we can try to redownload it here.
tarExists <- FS.exists archivePath
-- unpack the tars in a temp folder, then move to local cache
let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
tempDir <- mkTemp
FS.mkdirp tempDir
tarIsGood <-
if tarExists then do
logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
else
pure false
case tarExists, tarIsGood, offline of
true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
_, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
_, _, Online -> do
let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
logInfo $ "Fetching package " <> packageVersion
response <- liftAff $ withBackoff' do
res <- Http.request
( Http.defaultRequest
{ method = Left Method.GET
, responseFormat = Response.arrayBuffer
, url = packageUrl
}
)
-- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
case res of
Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
_ -> pure unit
pure res
case response of
Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
Just (Right { status, body }) | status /= StatusCode 200 -> do
(buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
Just (Right r@{ body: archiveArrayBuffer }) -> do
logDebug $ "Got status: " <> show r.status
-- check the size and hash of the tar against the metadata
archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
archiveSize <- liftEffect $ Buffer.size archiveBuffer
archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
unless (Int.toNumber archiveSize == versionMetadata.bytes) do
die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
unless (archiveSha == versionMetadata.hash) do
die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
-- if everything's alright we stash the tar in the global cache
logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
FS.writeFile archivePath archiveBuffer
logDebug $ "Unpacking archive to temp folder: " <> tempDir
(liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
Right _ -> pure unit
Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
-- Local package, no work to be done
LocalPackage _ -> pure unit
WorkspacePackage _ -> pure unit
fetchPackagesToLocalCache depsToFetch

-- We return the dependencies, going through the lockfile write if we need to
-- (we return them from inside there because we need to update the commit hashes)
case workspace.packageSet.lockfile of
Right _lockfile -> pure dependencies
Left reason -> writeNewLockfile reason dependencies

-- | Download (or reuse from cache) every package in the given map, placing each
-- | one in Spago's local package cache so a subsequent build can find it.
-- |
-- | Per package:
-- |   * packages already present in the local cache are skipped entirely;
-- |   * git packages are cloned into the local cache;
-- |   * registry packages are checked against the registry metadata (tarball
-- |     byte size and SHA256), downloaded if needed (waiting out 503s so the
-- |     backoff retries), stashed as a tarball in the global cache, unpacked
-- |     in a temp dir, then moved into the local cache;
-- |   * local and workspace packages need no fetching.
-- |
-- | Dies when running offline with a registry package missing from the cache,
-- | or when a downloaded archive fails the size/hash verification.
fetchPackagesToLocalCache :: forall a. Map PackageName Package -> Spago (FetchEnv a) Unit
fetchPackagesToLocalCache packages = do
  { offline } <- ask
  parallelise $ packages # Map.toUnfoldable <#> \(Tuple name package) -> do
    let localPackageLocation = Config.getPackageLocation name package
    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
    unlessM (FS.exists localPackageLocation) case package of
      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
      RegistryVersion v -> do
        -- if the version comes from the registry then we have a longer list of things to do
        let versionString = Registry.Version.print v
        let packageVersion = PackageName.print name <> "@" <> versionString
        -- get the metadata for the package, so we have access to the hash and other info
        metadata <- Registry.getMetadata name
        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
          Left err -> die $ "Couldn't read metadata, reason:\n " <> err
          Right versionMetadata -> do
            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
            -- then check if we have a tarball cached. If not, download it
            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
            FS.mkdirp globalCachePackagePath
            -- We need to see if the tarball is there, and if we can decompress it.
            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
            -- By checking that it's broken we can try to redownload it here.
            tarExists <- FS.exists archivePath
            -- unpack the tars in a temp folder, then move to local cache
            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
            tempDir <- mkTemp
            FS.mkdirp tempDir
            tarIsGood <-
              if tarExists then do
                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
              else
                pure false
            case tarExists, tarIsGood, offline of
              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
              _, _, Online -> do
                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
                logInfo $ "Fetching package " <> packageVersion
                response <- liftAff $ withBackoff' do
                  res <- Http.request
                    ( Http.defaultRequest
                        { method = Left Method.GET
                        , responseFormat = Response.arrayBuffer
                        , url = packageUrl
                        }
                    )
                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
                  case res of
                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
                    _ -> pure unit
                  pure res
                case response of
                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
                  Just (Right { status, body }) | status /= StatusCode 200 -> do
                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
                  Just (Right r@{ body: archiveArrayBuffer }) -> do
                    logDebug $ "Got status: " <> show r.status
                    -- check the size and hash of the tar against the metadata
                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
                    unless (archiveSha == versionMetadata.hash) do
                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
                    -- if everything's alright we stash the tar in the global cache
                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
                    FS.writeFile archivePath archiveBuffer
                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
                      Right _ -> pure unit
                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
      -- Local package, no work to be done
      LocalPackage _ -> pure unit
      WorkspacePackage _ -> pure unit

-- | Look up `key` in a mutable in-memory cache held in a `Ref`, returning the
-- | cached value if present. Read-only: never modifies the cache.
lookupInCache :: forall a k v. Ord k => k -> Ref.Ref (Map k v) -> Spago a (Maybe v)
lookupInCache key cacheRef = liftEffect $ Map.lookup key <$> Ref.read cacheRef

Expand Down
1 change: 0 additions & 1 deletion src/Spago/Command/Ls.purs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ import Data.Codec.JSON.Record as CJ.Record
import Data.Foldable (elem)
import Data.Map (filterKeys)
import Data.Map as Map
import Data.Tuple.Nested (type (/\))
import Record as Record
import Registry.Internal.Codec (packageMap)
import Registry.PackageName as PackageName
Expand Down
1 change: 1 addition & 0 deletions src/Spago/Command/Publish.purs
Original file line number Diff line number Diff line change
Expand Up @@ -354,6 +354,7 @@ publish _args = do
-- from the solver (this is because the build might terminate the process, and we shall output the errors first)
logInfo "Building again with the build plan from the solver..."
let buildPlanDependencies = map Config.RegistryVersion resolutions
Fetch.fetchPackagesToLocalCache buildPlanDependencies
builtAgain <- runBuild { selected, dependencies: Map.singleton selected.package.name { core: buildPlanDependencies, test: Map.empty } }
( Build.run
{ depsOnly: false
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
.spago/
output/
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
Reading Spago workspace configuration...

✓ Selecting package to build: aaa

Downloading dependencies...
Building...
Src Lib All
Warnings 0 0 0
Errors 0 0 0

✓ Build succeeded.

Passed preliminary checks.
‼ Spago is in offline mode - not pushing the git tag v0.0.1
Building again with the build plan from the solver...
Building...
[1 of 3] Compiling Effect.Console
[3 of 3] Compiling Lib
[2 of 3] Compiling Effect.Class.Console
Src Lib All
Warnings 0 0 0
Errors 0 0 0

✓ Build succeeded.


✓ Ready for publishing. Calling the registry..


✘ Spago is offline - not able to call the Registry.
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
Reading Spago workspace configuration...

✓ Selecting package to build: aaa

Downloading dependencies...
Building...
[1 of 3] Compiling Effect.Console
[3 of 3] Compiling Lib
[2 of 3] Compiling Effect.Class.Console
Src Lib All
Warnings 0 0 0
Errors 0 0 0

✓ Build succeeded.

Passed preliminary checks.
‼ Spago is in offline mode - not pushing the git tag v0.0.1
Building again with the build plan from the solver...
Building...
[1 of 3] Compiling Effect.Console
[ERROR 1/1 MissingFFIModule] .spago/p/console-6.1.0/src/Effect/Console.purs:1:1

v
1 module Effect.Console where
2
3 import Control.Bind (discard, bind, pure)
...
86 result <- inner
87 groupEnd
88 pure result
^

The foreign module implementation for module Effect.Console is missing.

Src Lib All
Warnings 0 0 0
Errors 0 1 1

✘ Failed to build.
18 changes: 18 additions & 0 deletions test-fixtures/publish/1110-solver-different-version/spago.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
package:
name: aaa
dependencies:
- console: ">=6.0.0 <7.0.0"
- effect: ">=4.0.0 <5.0.0"
- prelude: ">=6.0.1 <7.0.0"
- maybe: ">=6.0.0 <7.0.0"
publish:
version: 0.0.1
license: MIT
location:
githubOwner: purescript
githubRepo: aaa
workspace:
packageSet:
registry: 58.0.0
extraPackages:
console: "6.0.0"
10 changes: 10 additions & 0 deletions test-fixtures/publish/1110-solver-different-version/src/Main.purs
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
-- | Minimal publish test fixture: exercises dependencies on `maybe`,
-- | `console`, `effect` and `prelude` so the registry solver has work to do.
module Lib where

import Prelude

import Data.Maybe (Maybe(..), isNothing)
import Effect (Effect)
import Effect.Console (logShow)

-- | Prints `true` (`isNothing Nothing`); exists only so the fixture has a
-- | compilable binding that touches every declared dependency.
printNothing :: Effect Unit
printNothing = logShow $ isNothing Nothing
4 changes: 4 additions & 0 deletions test/Prelude.purs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import Data.String as String
import Effect.Aff as Aff
import Effect.Class.Console (log)
import Effect.Class.Console as Console
import Node.FS.Aff as FS.Aff
import Node.Library.Execa (ExecaResult)
import Node.Path (dirname)
import Node.Path as Path
Expand Down Expand Up @@ -75,6 +76,9 @@ withTempDir = Aff.bracket createTempDir cleanupTempDir
cleanupTempDir { oldCwd } = do
liftEffect $ Process.chdir oldCwd

-- | Forcefully and recursively delete a directory, retrying a few times with a
-- | delay — `force: true` makes a missing path a no-op, and the retries paper
-- | over transient locks (e.g. lingering handles on Windows).
rmRf :: forall m. MonadAff m => FilePath -> m Unit
rmRf dir = liftAff $ FS.Aff.rm' dir { force: true, recursive: true, maxRetries: 5, retryDelay: 1000 }

shouldEqual
:: forall m t
. MonadThrow Error m
Expand Down
3 changes: 0 additions & 3 deletions test/Spago/Build/Monorepo.purs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import Data.Array as Array
import Data.String (Pattern(..))
import Data.String as String
import Effect.Aff (bracket)
import Node.FS.Aff as FS.Aff
import Node.Path as Path
import Node.Process as Process
import Spago.Cmd as Cmd
Expand Down Expand Up @@ -357,5 +356,3 @@ spec = Spec.describe "monorepo" do
res <- Cmd.exec "git" args opts
res # shouldBeSuccess
pure $ Cmd.getStdout res

rmRf dir = liftAff $ FS.Aff.rm' dir { force: true, recursive: true, maxRetries: 5, retryDelay: 1000 }
Loading

0 comments on commit 4ceab91

Please sign in to comment.