diff --git a/SPEC.md b/SPEC.md index c0f7094c7..bb545f214 100644 --- a/SPEC.md +++ b/SPEC.md @@ -207,7 +207,7 @@ Note: - Globs you provide at the `includeFiles` and `excludeFiles` keys must contain only `*`, `**`, `/`, `.`, `..`, and characters for Linux file paths. It is not possible to negate a glob (ie. the `!` character), and globs cannot represent a path out of the package source directory. - When packaging your project source, the registry will first "include" your `src` directory and always-included files such as your `purs.json` file. Then it will include files which match globs indicated by the `includeFiles` key ([always-ignored files](#always-ignored-files) cannot be included). Finally, it will apply the excluding globs indicated by the `excludeFiles` key to the included files ([always-included files](#always-included-files) cannot be excluded). -- Dependencies you provide at the `dependencies` key must exist in the registry, and the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies). +- Dependencies you provide at the `dependencies` key must exist in the registry, the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies), and transitive dependencies are not allowed (ie. any modules you import in your code must come from packages listed in your dependencies). For example: @@ -234,11 +234,12 @@ For example: All packages in the registry have an associated metadata file, which is located in the `metadata` directory of the `registry` repository under the package name. For example, the metadata for the `aff` package is located at: https://github.com/purescript/registry/blob/main/metadata/aff.json. 
Metadata files are the source of truth on all published and unpublished versions for a particular package for what their content is and where the package is located. Metadata files are produced by the registry, not by package authors, though they take some information from package manifests. -Each published version of a package records three fields: +Each published version of a package records four fields: - `hash`: a [`Sha256`](#Sha256) of the compressed archive fetched by the registry for the given version - `bytes`: the size of the tarball in bytes - `publishedTime`: the time the package was published as an `ISO8601` string +- `compilers`: compiler versions this package is known to work with. This field can be in one of two states: a single version indicates that the package worked with a specific compiler on upload but has not yet been tested with all compilers, whereas a non-empty array of versions indicates the package has been tested with all compilers the registry supports. Each unpublished version of a package records three fields: diff --git a/app/fixtures/github-packages/transitive-1.0.0/bower.json b/app/fixtures/github-packages/transitive-1.0.0/bower.json new file mode 100644 index 000000000..d0d4d0bd1 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/bower.json @@ -0,0 +1,12 @@ +{ + "name": "purescript-transitive", + "homepage": "https://github.com/purescript/purescript-transitive", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-transitive.git" + }, + "dependencies": { + "purescript-effect": "^4.0.0" + } +} diff --git a/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs new file mode 100644 index 000000000..71d771f62 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs @@ -0,0 +1,6 @@ +module Transitive where + +import Prelude + +uno :: Int +uno = one diff 
--git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 0cffc4ab8..965567c83 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -6,6 +6,7 @@ "published": { "6.0.1": { "bytes": 31142, + "compilers": ["0.15.3", "0.15.4", "0.15.5"], "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index 68f250604..b5d5a86ea 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -6,6 +6,7 @@ "published": { "4.0.1": { "bytes": 2184, + "compilers": ["0.15.2", "0.15.3", "0.15.4"], "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 19e09564c..9a83187bd 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1,14 +1,19 @@ module Registry.App.API ( AuthenticatedEffects + , COMPILER_CACHE + , CompilerCache(..) 
, PackageSetUpdateEffects , PublishEffects + , _compilerCache , authenticated , copyPackageSourceFiles + , findAllCompilers , formatPursuitResolutions + , installBuildPlan , packageSetUpdate , packagingTeam - , parseInstalledModulePath , publish + , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -16,13 +21,15 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array -import Data.Array.NonEmpty as NEA import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.JSON as CJ +import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record -import Data.DateTime (DateTime) +import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) +import Data.List.NonEmpty as NonEmptyList +import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format @@ -33,7 +40,7 @@ import Data.String.CodeUnits as String.CodeUnits import Data.String.NonEmpty as NonEmptyString import Data.String.Regex as Regex import Effect.Aff as Aff -import Effect.Ref as Ref +import Effect.Unsafe (unsafePerformEffect) import JSON as JSON import Node.ChildProcess.Types (Exit(..)) import Node.FS.Aff as FS.Aff @@ -46,9 +53,12 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Registry.App.Auth as Auth -import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Cache (class FsEncodable, Cache) +import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment (COMMENT) import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, 
PACCHETTIBOTTI_ENV, RESOURCE_ENV) @@ -79,13 +89,14 @@ import Registry.Foreign.FastGlob as FastGlob import Registry.Foreign.Octokit (IssueNumber(..), Team) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) import Registry.Operation as Operation -import Registry.Operation.Validation (UnpublishError(..), validateNoExcludedObligatoryFiles) +import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation import Registry.Owner as Owner import Registry.PackageName as PackageName @@ -94,12 +105,14 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 +import Registry.Solver (CompilerIndex, DependencyIndex, Intersection, SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +import Safe.Coerce as Safe.Coerce type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) @@ -253,7 +266,7 @@ authenticated auth = case auth.payload of pure published pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Unpublishing is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson 
Metadata.codec metadata @@ -291,7 +304,7 @@ authenticated auth = case auth.payload of Just value -> pure value pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Transferring is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -315,17 +328,21 @@ authenticated auth = case auth.payload of Registry.mirrorLegacyRegistry payload.name payload.newLocation Comment.comment "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be -- | upload. If it has been published before then the existing metadata will be -- | updated with the new version. -publish :: forall r. PackageSource -> PublishData -> Run (PublishEffects + r) Unit -publish source payload = do +-- +-- The legacyIndex argument contains the unverified manifests produced by the +-- legacy importer; these manifests can be used on legacy packages to conform +-- them to the registry rule that transitive dependencies are not allowed. +publish :: forall r. 
Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name - Log.debug $ "Publishing " <> printPackageSource source <> " package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload + Log.debug $ "Publishing package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload Log.debug $ "Verifying metadata..." Metadata existingMetadata <- Registry.readMetadata payload.name >>= case _ of @@ -368,23 +385,28 @@ publish source payload = do -- the package directory along with its detected publish time. Log.debug "Metadata validated. Fetching package source code..." tmp <- Tmp.mkTmpDir - { path: packageDirectory, published: publishedTime } <- Source.fetch source tmp existingMetadata.location payload.ref + { path: downloadedPackage, published: publishedTime } <- Source.fetch tmp existingMetadata.location payload.ref - Log.debug $ "Package downloaded to " <> packageDirectory <> ", verifying it contains a src directory with valid modules..." - Internal.Path.readPursFiles (Path.concat [ packageDirectory, "src" ]) >>= case _ of + Log.debug $ "Package downloaded to " <> downloadedPackage <> ", verifying it contains a src directory with valid modules..." + Internal.Path.readPursFiles (Path.concat [ downloadedPackage, "src" ]) >>= case _ of Nothing -> Except.throw $ Array.fold [ "This package has no PureScript files in its `src` directory. " , "All package sources must be in the `src` directory, with any additional " , "sources indicated by the `files` key in your manifest." ] - Just files -> do + Just files -> + -- The 'validatePursModules' function uses language-cst-parser, which only + -- supports syntax back to 0.15.0. We'll still try to validate the package + -- but it may fail to parse. 
Operation.Validation.validatePursModules files >>= case _ of + Left formattedError | payload.compiler < Purs.minLanguageCSTParser -> do + Log.debug $ "Package failed to parse in validatePursModules: " <> formattedError + Log.debug $ "Skipping check because package is published with a pre-0.15.0 compiler (" <> Version.print payload.compiler <> ")." Left formattedError -> Except.throw $ Array.fold [ "This package has either malformed or disallowed PureScript module names " - , "in its `src` directory. All package sources must be in the `src` directory, " - , "with any additional sources indicated by the `files` key in your manifest." + , "in its source: " , formattedError ] Right _ -> @@ -393,13 +415,18 @@ publish source payload = do -- If the package doesn't have a purs.json we can try to make one - possible scenarios: -- - in case it has a spago.yaml then we know how to read that, and have all the info to move forward -- - if it's a legacy import then we can try to infer as much info as possible to make a manifest - let packagePursJson = Path.concat [ packageDirectory, "purs.json" ] + let packagePursJson = Path.concat [ downloadedPackage, "purs.json" ] hadPursJson <- Run.liftEffect $ FS.Sync.exists packagePursJson - let packageSpagoYaml = Path.concat [ packageDirectory, "spago.yaml" ] + let packageSpagoYaml = Path.concat [ downloadedPackage, "spago.yaml" ] hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml - Manifest manifest <- + address <- case existingMetadata.location of + Git _ -> Except.throw "Packages can only come from GitHub for now." + GitHub { subdir: Just subdir } -> Except.throw $ "Packages cannot yet use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." 
+ GitHub { owner, repo } -> pure { owner, repo } + + Manifest receivedManifest <- if hadPursJson then Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of Left error -> do @@ -424,19 +451,15 @@ publish source payload = do Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do Comment.comment $ Array.fold - [ "Converted your spago.yaml into a purs.json manifest to use for publishing:\n" - , "```json" + [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```" + , "\n```\n" ] pure manifest else do Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." - address <- case existingMetadata.location of - Git _ -> Except.throw "Legacy packages can only come from GitHub." - GitHub { subdir: Just subdir } -> Except.throw $ "Legacy packages cannot use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." - GitHub { owner, repo } -> pure { owner, repo } version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -453,60 +476,69 @@ publish source payload = do Log.debug $ "Successfully produced a legacy manifest from the package source." 
let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest Comment.comment $ Array.fold - [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:\n" - , "```json" + [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```" + , "\n```\n" ] pure manifest -- We trust the manifest for any changes to the 'owners' field, but for all -- other fields we trust the registry metadata. - let metadata = existingMetadata { owners = manifest.owners } - unless (Operation.Validation.nameMatches (Manifest manifest) payload) do + let metadata = existingMetadata { owners = receivedManifest.owners } + unless (Operation.Validation.nameMatches (Manifest receivedManifest) payload) do Except.throw $ Array.fold [ "The manifest file specifies a package name (" - , PackageName.print manifest.name + , PackageName.print receivedManifest.name , ") that differs from the package name submitted to the API (" , PackageName.print payload.name , "). The manifest and API request must match." ] - unless (Operation.Validation.locationMatches (Manifest manifest) (Metadata metadata)) do - Except.throw $ Array.fold - [ "The manifest file specifies a location (" - , stringifyJson Location.codec manifest.location - , ") that differs from the location in the registry metadata (" - , stringifyJson Location.codec metadata.location - , "). If you would like to change the location of your package you should " - , "submit a transfer operation." - ] + unless (Operation.Validation.locationMatches (Manifest receivedManifest) (Metadata metadata)) do + if isJust maybeLegacyIndex then + -- The legacy importer is sometimes run on older packages, some of which have been transferred. 
Since + -- package metadata only records the latest location, this can cause a problem: the manifest reports + -- the location at the time, but the metadata reports the current location. + Log.warn $ Array.fold + [ "In legacy mode and manifest location differs from existing metadata. This indicates a package that was " + , "transferred from a previous location. Ignoring location match validation..." + ] + else + Except.throw $ Array.fold + [ "The manifest file specifies a location (" + , stringifyJson Location.codec receivedManifest.location + , ") that differs from the location in the registry metadata (" + , stringifyJson Location.codec metadata.location + , "). If you would like to change the location of your package you should " + , "submit a transfer operation." + ] - when (Operation.Validation.isMetadataPackage (Manifest manifest)) do + when (Operation.Validation.isMetadataPackage (Manifest receivedManifest)) do Except.throw "The `metadata` package cannot be uploaded to the registry because it is a protected package." - for_ (Operation.Validation.isNotUnpublished (Manifest manifest) (Metadata metadata)) \info -> do + for_ (Operation.Validation.isNotUnpublished (Manifest receivedManifest) (Metadata metadata)) \info -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that has been unpublished: " <> Version.print manifest.version + [ "You tried to upload a version that has been unpublished: " <> Version.print receivedManifest.version , "" , "```json" , printJson Metadata.unpublishedMetadataCodec info , "```" ] - case Operation.Validation.isNotPublished (Manifest manifest) (Metadata metadata) of + case Operation.Validation.isNotPublished (Manifest receivedManifest) (Metadata metadata) of -- If the package has been published already, then we check whether the published -- version has made it to Pursuit or not. If it has, then we terminate here. If -- it hasn't then we publish to Pursuit and then terminate. 
Just info -> do - published <- Pursuit.getPublishedVersions manifest.name >>= case _ of + published <- Pursuit.getPublishedVersions receivedManifest.name >>= case _ of Left error -> Except.throw error Right versions -> pure versions - case Map.lookup manifest.version published of + case Map.lookup receivedManifest.version published of Just url -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that already exists: " <> Version.print manifest.version + [ "You tried to upload a version that already exists: " <> Version.print receivedManifest.version , "" , "Its metadata is:" , "```json" @@ -517,300 +549,272 @@ publish source payload = do , url ] + Nothing | payload.compiler < Purs.minPursuitPublish -> do + Comment.comment $ Array.fold + [ "This version has already been published to the registry, but the docs have not been " + , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " + , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish + , ". Please try with a later compiler." + ] + Nothing -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." 
] - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - compilationResult <- compilePackage { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } + compilerIndex <- readCompilerIndex + verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + let installedResolutions = Path.concat [ tmp, ".registry" ] + installBuildPlan verifiedResolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just downloadedPackage + } case compilationResult of - Left error -> do + Left compileFailure -> do + let error = printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." - Right dependenciesDir -> do + Right _ -> do Log.debug "Uploading to Pursuit" -- While we have created a manifest from the package source, we -- still need to ensure a purs.json file exists for 'purs publish'. unless hadPursJson do - existingManifest <- ManifestIndex.readManifest manifest.name manifest.version + existingManifest <- ManifestIndex.readManifest receivedManifest.name receivedManifest.version case existingManifest of Nothing -> Except.throw "Version was previously published, but we could not find a purs.json file in the package source, and no existing manifest was found in the registry." 
Just existing -> Run.liftAff $ writeJsonFile Manifest.codec packagePursJson existing - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of + Left publishErr -> Except.throw publishErr + Right _ -> do + FS.Extra.remove tmp + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" -- In this case the package version has not been published, so we proceed -- with ordinary publishing. - Nothing -> - -- Now that we've verified the package we can write the manifest to the source - -- directory and then publish it. - if hadPursJson then do - -- No need to verify the generated manifest because nothing was generated, - -- and no need to write a file (it's already in the package source.) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - - else if hasSpagoYaml then do - -- We need to write the generated purs.json file, but because spago-next - -- already does unused dependency checks and supports explicit test-only - -- dependencies we can skip those checks. - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } + Nothing -> do + Log.info "Verifying the package build plan..." + compilerIndex <- readCompilerIndex + validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + + Comment.comment "Verifying unused and/or missing dependencies..." 
+ + -- First we install the resolutions and call 'purs graph' to adjust the + -- manifest as needed, but we defer compilation until after this check + -- in case the package manifest and resolutions are adjusted. + let installedResolutions = Path.concat [ tmp, ".registry" ] + installBuildPlan validatedResolutions installedResolutions + + let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] + let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] + let pursGraph = Purs.Graph { globs: [ srcGlobs, depGlobs ] } + + -- We need to use the minimum compiler version that supports 'purs graph'. + let pursGraphCompiler = if payload.compiler >= Purs.minPursGraph then payload.compiler else Purs.minPursGraph + + -- In this step we run 'purs graph' to get a graph of the package source + -- and installed dependencies and use that to determine if the manifest + -- contains any unused or missing dependencies. If it does and is a legacy + -- manifest then we fix it and return the result. If does and is a modern + -- manifest (spago.yaml, purs.json, etc.) then we reject it. If it doesn't + -- then we simply return the manifest and resolutions we already had. + Tuple manifest resolutions <- Run.liftAff (Purs.callCompiler { command: pursGraph, version: Just pursGraphCompiler, cwd: Nothing }) >>= case _ of + Left err -> case err of + UnknownError str -> Except.throw str + MissingCompiler -> Except.throw $ "Missing compiler " <> Version.print pursGraphCompiler + CompilationError errs -> do + Log.warn $ Array.fold + [ "Failed to discover unused dependencies because purs graph failed:\n" + , Purs.printCompilerErrors errs + ] + -- The purs graph command will fail if the source code uses syntax + -- before the oldest usable purs graph compiler (ie. 0.14.0). In + -- this case we simply accept the dependencies as-is, even though + -- they could technically violate Registry rules around missing and + -- unused dependencies. 
This only affects old packages and we know + -- they compile, so we've decided it's an acceptable exception. + pure $ Tuple (Manifest receivedManifest) validatedResolutions + Right output -> case JSON.parse output of + Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr + Right json -> case CJ.decode PursGraph.pursGraphCodec json of + Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CJ.DecodeError.print decodeErr + Right graph -> do + Log.debug "Got a valid graph of source and dependencies." + let + pathParser path = map _.name $ case String.stripPrefix (String.Pattern installedResolutions) path of + Just trimmed -> parseModulePath trimmed + Nothing -> case String.stripPrefix (String.Pattern downloadedPackage) path of + Just _ -> Right { name: receivedManifest.name, version: receivedManifest.version } + Nothing -> Left $ "Failed to parse module path " <> path <> " because it is not in the package source or installed dependencies." + + case Operation.Validation.noTransitiveOrMissingDeps (Manifest receivedManifest) graph pathParser of + -- Association failures should always throw + Left (Left assocErrors) -> + Except.throw $ Array.fold + [ "Failed to validate unused / missing dependencies because modules could not be associated with package names:" + , flip NonEmptyArray.foldMap1 assocErrors \{ error, module: ModuleName moduleName, path } -> + "\n - " <> moduleName <> " (" <> path <> "): " <> error + ] + + -- FIXME: For now we attempt to fix packages if a legacy index + -- is provided (ie. the publish is via the importer) but we + -- should at some point make this a hard error. 
+ Left (Right depError) -> case maybeLegacyIndex of + Nothing -> + Except.throw $ "Failed to validate unused / missing dependencies: " <> Operation.Validation.printValidateDepsError depError + Just legacyIndex -> do + Log.info $ "Found fixable dependency errors: " <> Operation.Validation.printValidateDepsError depError + conformLegacyManifest (Manifest receivedManifest) payload.compiler compilerIndex legacyIndex depError + + -- If the check passes then we can simply return the manifest and + -- resolutions. + Right _ -> pure $ Tuple (Manifest receivedManifest) validatedResolutions + + -- Now that we've verified the package we can write the manifest to the + -- source directory. + Run.liftAff $ writeJsonFile Manifest.codec packagePursJson manifest + + Log.info "Creating packaging directory" + let packageDirname = PackageName.print receivedManifest.name <> "-" <> Version.print receivedManifest.version + let packageSource = Path.concat [ tmp, packageDirname ] + FS.Extra.ensureDirectory packageSource + -- We copy over all files that are always included (ie. src dir, purs.json file), + -- and any files the user asked for via the 'files' key, and remove all files + -- that should never be included (even if the user asked for them). + copyPackageSourceFiles { includeFiles: receivedManifest.includeFiles, excludeFiles: receivedManifest.excludeFiles, source: downloadedPackage, destination: packageSource } + removeIgnoredTarballFiles packageSource + + -- Now that we have the package source contents we can verify we can compile + -- the package with exactly what is going to be uploaded. 
+ Comment.comment $ Array.fold + [ "Verifying package compiles using compiler " + , Version.print payload.compiler + , " and resolutions:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Version.codec) resolutions + , "\n```" + ] - -- Otherwise this is a legacy package, generated from a combination of bower, - -- spago.dhall, and package set files, so we need to verify the generated - -- manifest does not contain unused dependencies before writing it. + -- We clear the installation directory so that no old installed resolutions + -- stick around. + Run.liftAff $ FS.Extra.remove installedResolutions + installBuildPlan resolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just tmp + } + + case compilationResult of + Left compileFailure -> do + let error = printCompilerFailure payload.compiler compileFailure + Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error + Right _ -> pure unit + + Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..." + let tarballName = packageDirname <> ".tar.gz" + let tarballPath = Path.concat [ tmp, tarballName ] + Tar.create { cwd: tmp, folderName: packageDirname } + + Log.info "Tarball created. Verifying its size..." + bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath + for_ (Operation.Validation.validateTarballSize bytes) case _ of + Operation.Validation.ExceedsMaximum maxPackageBytes -> + Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." 
+ Operation.Validation.WarnPackageSize maxWarnBytes -> + Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." + + -- If a package has under ~30 bytes it's about guaranteed that packaging the + -- tarball failed. This can happen if the system running the API has a non- + -- GNU tar installed, for example. + let minBytes = 30.0 + when (bytes < minBytes) do + Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." + + hash <- Sha256.hashFile tarballPath + Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable." + Log.info $ "Tarball hash: " <> Sha256.print hash + + Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath + Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." + let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } + let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) + Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + + -- We write to the registry index if possible. If this fails, the packaging + -- team should manually insert the entry. + Log.debug "Adding the new version to the registry index" + Registry.writeManifest manifest + + Registry.mirrorLegacyRegistry payload.name newMetadata.location + Comment.comment "Mirrored registry operation to the legacy registry!" + + Log.debug "Uploading package documentation to Pursuit" + if payload.compiler >= Purs.minPursuitPublish then + -- TODO: We must use the 'downloadedPackage' instead of 'packageSource' + -- because Pursuit requires a git repository, and our tarball directory + -- is not one. 
This should be changed once Pursuit no longer needs git. + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of + Left publishErr -> do + Log.error publishErr + Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Right _ -> + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Log.debug "Pruning unused dependencies from legacy package manifest..." - - Log.debug "Solving manifest to get all transitive dependencies." - resolutions <- verifyResolutions (Manifest manifest) payload.resolutions - - Log.debug "Installing dependencies." - tmpDepsDir <- Tmp.mkTmpDir - installBuildPlan resolutions tmpDepsDir - - Log.debug "Discovering used dependencies from source." - let srcGlobs = Path.concat [ packageDirectory, "src", "**", "*.purs" ] - let depGlobs = Path.concat [ tmpDepsDir, "*", "src", "**", "*.purs" ] - let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] } - -- We need to use the minimum compiler version that supports 'purs graph' - let minGraphCompiler = unsafeFromRight (Version.parse "0.13.8") - let callCompilerVersion = if payload.compiler >= minGraphCompiler then payload.compiler else minGraphCompiler - Run.liftAff (Purs.callCompiler { command, version: Just callCompilerVersion, cwd: Nothing }) >>= case _ of - Left err -> do - let prefix = "Failed to discover unused dependencies because purs graph failed: " - Log.error $ prefix <> case err of - UnknownError str -> str - CompilationError errs -> Purs.printCompilerErrors errs - MissingCompiler -> "missing compiler " <> Version.print payload.compiler - -- We allow legacy packages through even if we couldn't run purs graph, - -- because we can't be sure we chose the correct compiler version. - if source == LegacyPackage then - Comment.comment "Failed to prune dependencies for legacy package, continuing anyway..." 
- else do - Except.throw "purs graph failed; cannot verify unused dependencies." - Right output -> case JSON.parse output of - Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr - Right json -> case CJ.decode PursGraph.pursGraphCodec json of - Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CJ.DecodeError.print decodeErr - Right graph -> do - Log.debug "Got a valid graph of source and dependencies. Removing install dir and associating discovered modules with their packages..." - FS.Extra.remove tmpDepsDir - - let - -- We need access to a graph that _doesn't_ include the package - -- source, because we only care about dependencies of the package. - noSrcGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmpDepsDir, path: _ } - - case PursGraph.associateModules pathParser noSrcGraph of - Left errs -> - Except.throw $ String.joinWith "\n" - [ "Failed to associate modules with packages while finding unused dependencies:" - , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } -> - " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n" - ] - Right modulePackageMap -> do - Log.debug "Associated modules with their package names. Finding all modules used in package source..." - -- The modules used in the package source code are any that have - -- a path beginning with the package source directory. We only - -- care about dependents of these modules. - let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - Log.debug "Found all modules used in package source. Finding all modules used by those modules..." 
- let allReachableModules = PursGraph.allDependenciesOf sourceModules graph - - -- Then we can associate each reachable module with its package - -- name to get the full set of used package names. - let allUsedPackages = Set.mapMaybe (flip Map.lookup modulePackageMap) allReachableModules - - -- Finally, we can use this to find the unused dependencies. - Log.debug "Found all packages reachable by the project source code. Determining unused dependencies..." - case Operation.Validation.getUnusedDependencies (Manifest manifest) resolutions allUsedPackages of - Nothing -> do - Log.debug "No unused dependencies! This manifest is good to go." - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - Just isUnused -> do - let printed = String.joinWith ", " (PackageName.print <$> NonEmptySet.toUnfoldable isUnused) - Log.debug $ "Found unused dependencies: " <> printed - Comment.comment $ "Generated legacy manifest contains unused dependencies which will be removed: " <> printed - let verified = manifest { dependencies = Map.filterKeys (not <<< flip NonEmptySet.member isUnused) manifest.dependencies } - Log.debug "Writing updated, pruned manifest." - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest verified) - publishRegistry - { source - , manifest: Manifest verified - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - -type PublishRegistry = - { source :: PackageSource - , manifest :: Manifest - , metadata :: Metadata - , payload :: PublishData - , publishedTime :: DateTime - , tmp :: FilePath - , packageDirectory :: FilePath - } + Comment.comment $ Array.fold + [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" + , Version.print payload.compiler + , "). 
If you want to publish documentation, please try again with a later compiler." + ] --- A private helper function for publishing to the registry. Separated out of --- the main 'publish' function because we sometimes use the publish function to --- publish to Pursuit only (in the case the package has been pushed to the --- registry, but docs have not been uploaded). -publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit -publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do - Log.debug "Verifying the package build plan..." - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - - Log.debug "Verifying that the package dependencies are all registered..." - unregisteredRef <- Run.liftEffect $ Ref.new Map.empty - forWithIndex_ verifiedResolutions \name version -> do - Registry.readMetadata name >>= case _ of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just (Metadata { published }) -> case Map.lookup version published of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just _ -> pure unit - unregistered <- Run.liftEffect $ Ref.read unregisteredRef - unless (Map.isEmpty unregistered) do - Except.throw $ Array.fold - [ "Cannot register this package because it has unregistered dependencies: " - , Array.foldMap (\(Tuple name version) -> "\n - " <> formatPackageVersion name version) (Map.toUnfoldable unregistered) - ] + Comment.comment "Determining all valid compiler versions for this package..." 
+ allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } - Log.info "Packaging tarball for upload..." - let newDir = PackageName.print manifest.name <> "-" <> Version.print manifest.version - let packageSourceDir = Path.concat [ tmp, newDir ] - Log.debug $ "Creating packaging directory at " <> packageSourceDir - FS.Extra.ensureDirectory packageSourceDir - -- We copy over all files that are always included (ie. src dir, purs.json file), - -- and any files the user asked for via the 'files' key, and remove all files - -- that should never be included (even if the user asked for them). - copyPackageSourceFiles { includeFiles: manifest.includeFiles, excludeFiles: manifest.excludeFiles, source: packageDirectory, destination: packageSourceDir } - Log.debug "Removing always-ignored files from the packaging directory." - removeIgnoredTarballFiles packageSourceDir - - let tarballName = newDir <> ".tar.gz" - let tarballPath = Path.concat [ tmp, tarballName ] - Tar.create { cwd: tmp, folderName: newDir } - - Log.info "Tarball created. Verifying its size..." - bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath - for_ (Operation.Validation.validateTarballSize bytes) case _ of - Operation.Validation.ExceedsMaximum maxPackageBytes -> - Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." 
- Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." - - -- If a package has under ~30 bytes it's about guaranteed that packaging the - -- tarball failed. This can happen if the system running the API has a non- - -- GNU tar installed, for example. - let minBytes = 30.0 - when (bytes < minBytes) do - Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." - - hash <- Sha256.hashFile tarballPath - Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable." - Log.info $ "Tarball hash: " <> Sha256.print hash - - -- Now that we have the package source contents we can verify we can compile - -- the package. We skip failures when the package is a legacy package. - Log.info "Verifying package compiles (this may take a while)..." - compilationResult <- compilePackage - { packageSourceDir: packageDirectory - , compiler: payload.compiler - , resolutions: verifiedResolutions - } + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - case compilationResult of - Left error - -- We allow legacy packages to fail compilation because we do not - -- necessarily know what compiler to use with them. - | source == LegacyPackage -> do - Log.debug error - Log.warn "Failed to compile, but continuing because this package is a legacy package." - | otherwise -> - Except.throw error - Right _ -> - pure unit - - Comment.comment "Package is verified! Uploading it to the storage backend..." - Storage.upload manifest.name manifest.version tarballPath - Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file." 
- let newMetadata = metadata { published = Map.insert manifest.version { hash, ref: payload.ref, publishedTime, bytes } metadata.published } - Registry.writeMetadata manifest.name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" - - -- After a package has been uploaded we add it to the registry index, we - -- upload its documentation to Pursuit, and we can now process it for package - -- sets when the next batch goes out. - - -- We write to the registry index if possible. If this fails, the packaging - -- team should manually insert the entry. - Registry.writeManifest (Manifest manifest) - - when (source == CurrentPackage) $ case compilationResult of - Left error -> do - Log.error $ "Compilation failed, cannot upload to pursuit: " <> error - Except.throw "Cannot publish to Pursuit because this package failed to compile." - Right dependenciesDir -> do - Log.debug "Uploading to Pursuit" - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } - - Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry." + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right (NonEmptySet.toUnfoldable1 validCompilers) })) (un Manifest manifest).version newMetadata.published } + Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + + Comment.comment "Wrote completed metadata to the registry!" + FS.Extra.remove tmp -- | Verify the build plan for the package. 
If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the -- | manifest. If not, we solve their manifest to produce a build plan. -verifyResolutions :: forall r. Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + EXCEPT String + r) (Map PackageName Version) -verifyResolutions manifest resolutions = do +verifyResolutions :: forall r. CompilerIndex -> Version -> Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + AFF + EXCEPT String + r) (Map PackageName Version) +verifyResolutions compilerIndex compiler manifest resolutions = do Log.debug "Check the submitted build plan matches the manifest" - manifestIndex <- Registry.readAllManifests case resolutions of - Nothing -> case Operation.Validation.validateDependenciesSolve manifest manifestIndex of - Left errors -> do - let - printedError = String.joinWith "\n" - [ "Could not produce valid dependencies for manifest." - , "```" - , errors # foldMapWithIndex \index error -> String.joinWith "\n" - [ "[Error " <> show (index + 1) <> "]" - , Solver.printSolverError error - ] - , "```" - ] - Except.throw printedError - Right solved -> pure solved + Nothing -> do + case Operation.Validation.validateDependenciesSolve compiler manifest compilerIndex of + Left errors -> do + let + printedError = String.joinWith "\n" + [ "Could not produce valid dependencies for manifest." 
+ , "```" + , errors # foldMapWithIndex \index error -> String.joinWith "\n" + [ "[Error " <> show (index + 1) <> "]" + , Solver.printSolverError error + ] + , "```" + ] + Except.throw printedError + Right solved -> pure solved Just provided -> do validateResolutions manifest provided pure provided @@ -860,61 +864,88 @@ validateResolutions manifest resolutions = do , incorrectVersionsError ] -type CompilePackage = - { packageSourceDir :: FilePath - , compiler :: Version - , resolutions :: Map PackageName Version +type FindAllCompilersResult = + { failed :: Map Version (Either SolverErrors CompilerFailure) + , succeeded :: Set Version } -compilePackage :: forall r. CompilePackage -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either String FilePath) -compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do - tmp <- Tmp.mkTmpDir - let dependenciesDir = Path.concat [ tmp, ".registry" ] - FS.Extra.ensureDirectory dependenciesDir - - let - globs = - if Map.isEmpty resolutions then - [ "src/**/*.purs" ] - else - [ "src/**/*.purs" - , Path.concat [ dependenciesDir, "*/src/**/*.purs" ] - ] - - Log.debug "Installing build plan..." - installBuildPlan resolutions dependenciesDir - - Log.debug "Compiling..." - compilerOutput <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs } - , version: Just compiler - , cwd: Just packageSourceDir - } - - case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." 
- ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed for your package due to a compiler error:" - , "```" - , err - , "```" - ] - Right _ -> pure dependenciesDir +-- | Find all compilers that can compile the package source code and installed +-- | resolutions from the given array of compilers. +findAllCompilers + :: forall r + . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } + -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult +findAllCompilers { source, manifest, compilers } = do + compilerIndex <- readCompilerIndex + checkedCompilers <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target + case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of + Left solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + pure $ Left $ Tuple target (Left solverErrors) + Right (Tuple mbCompiler resolutions) -> do + Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions + case mbCompiler of + Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" + Just selected | selected /= target -> Except.throw $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print selected + , ") that differs from the target compiler (" + , Version.print target + , ")." 
+ ] + Just _ -> pure unit + Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of + Nothing -> do + Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + let installed = Path.concat [ workdir, ".registry" ] + FS.Extra.ensureDirectory installed + installBuildPlan resolutions installed + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + case result of + Left err -> do + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Right _ -> do + Log.debug $ "Compilation succeeded with compiler " <> Version.print target + Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } + pure $ bimap (Tuple target <<< Right) (const target) result + Just { result } -> + pure $ bimap (Tuple target <<< Right) (const target) result + + let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers + pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } + +printCompilerFailure :: Version -> CompilerFailure -> String +printCompilerFailure compiler = case _ of + MissingCompiler -> Array.fold + [ "Compilation failed because the build plan compiler version " + , Version.print compiler + , " is not supported. Please try again with a different compiler." 
+ ] + CompilationError errs -> String.joinWith "\n" + [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" + , "```" + , Purs.printCompilerErrors errs + , "```" + ] + UnknownError err -> String.joinWith "\n" + [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" + , "```" + , err + , "```" + ] -- | Install all dependencies indicated by the build plan to the specified -- | directory. Packages will be installed at 'dir/package-name-x.y.z'. installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit installBuildPlan resolutions dependenciesDir = do + Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir -- We fetch every dependency at its resolved version, unpack the tarball, and -- store the resulting source code in a specified directory for dependencies. forWithIndex_ resolutions \name version -> do @@ -934,11 +965,10 @@ installBuildPlan resolutions dependenciesDir = do Log.debug $ "Installed " <> formatPackageVersion name version -- | Parse the name and version from a path to a module installed in the standard --- | form: '/-/...' -parseInstalledModulePath :: { prefix :: FilePath, path :: FilePath } -> Either String { name :: PackageName, version :: Version } -parseInstalledModulePath { prefix, path } = do +-- | form: '-...' 
+parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version } +parseModulePath path = do packageVersion <- lmap Parsing.parseErrorMessage $ Parsing.runParser path do - _ <- Parsing.String.string prefix _ <- Parsing.Combinators.optional (Parsing.Combinators.try (Parsing.String.string Path.sep)) Tuple packageVersionChars _ <- Parsing.Combinators.Array.manyTill_ Parsing.String.anyChar (Parsing.String.string Path.sep) pure $ String.CodeUnits.fromCharArray (Array.fromFoldable packageVersionChars) @@ -955,38 +985,40 @@ parseInstalledModulePath { prefix, path } = do pure { name, version } type PublishToPursuit = - { packageSourceDir :: FilePath - , dependenciesDir :: FilePath + { source :: FilePath , compiler :: Version , resolutions :: Map PackageName Version + , installedResolutions :: FilePath } -- | Publishes a package to Pursuit. -- | -- | ASSUMPTIONS: This function should not be run on legacy packages or on --- | packages where the `purescript-` prefix is still present. +-- | packages where the `purescript-` prefix is still present. Cannot be used +-- | on packages prior to 'Purs.minPursuitPublish' publishToPursuit :: forall r . PublishToPursuit - -> Run (PURSUIT + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) Unit -publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = do + -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit) +publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do Log.debug "Generating a resolutions file" tmp <- Tmp.mkTmpDir + when (compiler < Purs.minPursuitPublish) do + Except.throw $ "Cannot publish to Pursuit because this package was published with a pre-0.14.7 compiler (" <> Version.print compiler <> "). If you want to publish documentation, please try again with a later compiler." 
+ let - resolvedPaths = formatPursuitResolutions { resolutions, dependenciesDir } + resolvedPaths = formatPursuitResolutions { resolutions, installedResolutions } resolutionsFilePath = Path.concat [ tmp, "resolutions.json" ] Run.liftAff $ writeJsonFile pursuitResolutionsCodec resolutionsFilePath resolvedPaths -- The 'purs publish' command requires a clean working tree, but it isn't - -- guaranteed that packages have an adequate .gitignore file; compilers prior - -- to 0.14.7 did not ignore the purs.json file when publishing. So we stash - -- changes made during the publishing process (ie. inclusion of a new purs.json - -- file and an output directory from compilation) before calling purs publish. + -- guaranteed that packages have an adequate .gitignore file. So we stash + -- stash changes made during the publishing process before calling publish. -- https://git-scm.com/docs/gitignore Log.debug "Adding output and purs.json to local git excludes..." - Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ packageSourceDir, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) + Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ source, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) -- NOTE: The compatibility version of purs publish appends 'purescript-' to the -- package name in the manifest file: @@ -997,27 +1029,12 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = compilerOutput <- Run.liftAff $ Purs.callCompiler { command: Purs.Publish { resolutions: resolutionsFilePath } , version: Just compiler - , cwd: Just packageSourceDir + , cwd: Just source } publishJson <- case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Publishing failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." 
- ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed for your package due to an unknown compiler error:" - , "```" - , err - , "```" - ] + Left error -> + Except.throw $ printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1039,7 +1056,7 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = Left error -> Except.throw $ "Could not publish your package to Pursuit because an error was encountered (cc: @purescript/packaging): " <> error Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + FS.Extra.remove tmp type PursuitResolutions = Map RawPackageName { version :: Version, path :: FilePath } @@ -1050,13 +1067,13 @@ pursuitResolutionsCodec = rawPackageNameMapCodec $ CJ.named "Resolution" $ CJ.Re -- -- Note: This interfaces with Pursuit, and therefore we must add purescript- -- prefixes to all package names for compatibility with the Bower naming format. 
-formatPursuitResolutions :: { resolutions :: Map PackageName Version, dependenciesDir :: FilePath } -> PursuitResolutions -formatPursuitResolutions { resolutions, dependenciesDir } = +formatPursuitResolutions :: { resolutions :: Map PackageName Version, installedResolutions :: FilePath } -> PursuitResolutions +formatPursuitResolutions { resolutions, installedResolutions } = Map.fromFoldable do Tuple name version <- Map.toUnfoldable resolutions let bowerPackageName = RawPackageName ("purescript-" <> PackageName.print name) - packagePath = Path.concat [ dependenciesDir, PackageName.print name <> "-" <> Version.print version ] + packagePath = Path.concat [ installedResolutions, PackageName.print name <> "-" <> Version.print version ] [ Tuple bowerPackageName { path: packagePath, version } ] -- | Copy files from the package source directory to the destination directory @@ -1163,3 +1180,183 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } + +readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + +type AdjustManifest = + { source :: FilePath + , compiler :: Version + , manifest :: Manifest + , legacyIndex :: Maybe DependencyIndex + , currentIndex :: CompilerIndex + , resolutions :: Maybe (Map PackageName Version) + } + +-- | Conform a legacy manifest to the Registry requirements for dependencies, +-- | ie. that all direct imports are listed (no transitive dependencies) and no +-- | unused dependencies are listed. +conformLegacyManifest + :: forall r + . 
Manifest + -> Version + -> CompilerIndex + -> Solver.TransitivizedRegistry + -> ValidateDepsError + -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) +conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do + let + manifestRequired :: SemigroupMap PackageName Intersection + manifestRequired = Solver.initializeRequired manifest.dependencies + + legacyResolutions <- case Solver.solveFull { registry: legacyRegistry, required: manifestRequired } of + Left unsolvableLegacy -> do + Log.warn $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableLegacy + case Solver.solveWithCompiler (Range.exact compiler) currentIndex manifest.dependencies of + Left unsolvableCurrent -> Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableCurrent + Right (Tuple _ solved) -> do + Log.debug $ "Got current resolutions as a fallback to unsolvable legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + Right solved -> do + Log.debug $ "Got legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + + let + legacyTransitive :: Map PackageName Range + legacyTransitive = + Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + $ Safe.Coerce.coerce + $ _.required + $ Solver.solveSteps + $ Solver.solveSeed + $ Solver.withReachable { registry: legacyRegistry, required: manifestRequired } + + Log.debug $ "Got transitive solution:\n" <> printJson (Internal.Codec.packageMap Range.codec) legacyTransitive + + let + associateMissing :: Array PackageName -> Map PackageName Range + associateMissing packages = do + -- First we look up the package in the produced transitive ranges, as those + -- are the most likely to be correct. 
+ let associateTransitive pkg = maybe (Left pkg) (\range -> Right (Tuple pkg range)) (Map.lookup pkg legacyTransitive) + let associated = partitionEithers (map associateTransitive packages) + let foundFromTransitive = Map.fromFoldable associated.success + + -- If not found, we search for the ranges described for this dependency + -- in the manifests of the packages in the resolutions. + let + resolutionRanges :: Map PackageName Range + resolutionRanges = do + let + foldFn name prev version = fromMaybe prev do + versions <- Map.lookup name (un SemigroupMap legacyRegistry) + deps <- Map.lookup version (un SemigroupMap versions) + let deps' = Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) (un SemigroupMap deps) + pure $ Map.unionWith (\l r -> fromMaybe l (Range.intersect l r)) prev deps' + + foldlWithIndex foldFn Map.empty legacyResolutions + + foundFromResolutions :: Map PackageName Range + foundFromResolutions = Map.fromFoldable do + associated.fail # Array.mapMaybe \pkg -> map (Tuple pkg) (Map.lookup pkg resolutionRanges) + + Map.union foundFromTransitive foundFromResolutions + + fixUnused names (Manifest m) = do + let unused = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) names + let fixedDependencies = Map.difference m.dependencies unused + case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of + Left unsolvable -> do + Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved + + fixMissing names (Manifest m) = do + let fixedDependencies = Map.union m.dependencies (associateMissing (NonEmptySet.toUnfoldable names)) + -- Once we've fixed the missing dependencies we need to be sure we can 
still + -- produce a viable solution with the current index. + case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of + Left unsolvable -> do + Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved + + previousDepsMessage = Array.fold + [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add direct-imported ones. " + , "Your dependency list was:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + , "\n```\n" + ] + + newDepsMessage (Manifest new) = Array.fold + [ "\nYour new dependency list is:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) new.dependencies + , "\n```\n" + ] + + case problem of + UnusedDependencies names -> do + Tuple deps resolutions <- fixUnused names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + MissingDependencies names -> do + Tuple deps resolutions <- fixMissing names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + UnusedAndMissing { missing, unused } -> do + let unused' = Map.fromFoldable $ 
NonEmptySet.map (\name -> Tuple name unit) unused + let trimmed = Map.difference manifest.dependencies unused' + Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) + let newManifest = Manifest (manifest { dependencies = newDeps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" + , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest newResolutions + +type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) + +_compilerCache :: Proxy "compilerCache" +_compilerCache = Proxy + +data CompilerCache :: (Type -> Type -> Type) -> Type -> Type +data CompilerCache c a = Compilation Manifest (Map PackageName Version) Version (c { target :: Version, result :: Either CompilerFailure Unit } a) + +instance Functor2 c => Functor (CompilerCache c) where + map k (Compilation manifest resolutions compiler a) = Compilation manifest resolutions compiler (map2 k a) + +instance FsEncodable CompilerCache where + encodeFs = case _ of + Compilation (Manifest manifest) resolutions compiler next -> do + let + baseKey = "Compilation__" <> PackageName.print manifest.name <> "__" <> Version.print manifest.version <> "__" <> Version.print compiler <> "__" + hashKey = do + let resolutions' = foldlWithIndex (\name prev version -> formatPackageVersion name version <> prev) "" resolutions + unsafePerformEffect $ Sha256.hashString resolutions' + cacheKey = baseKey <> Sha256.print hashKey + + let + codec = CJ.named "FindAllCompilersResult" $ CJ.Record.object + { target: Version.codec + , result: CJ.Common.either compilerFailureCodec CJ.null + } + + Exists.mkExists $ Cache.AsJson cacheKey codec next diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs 
index ac64c8e65..ac9ffc398 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,11 +111,12 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do , " has no untracked or dirty files, it is safe to pull the latest." ] pure true - Just files -> do - Log.debug $ Array.fold - [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - , NonEmptyArray.foldMap1 (append "\n - ") files - ] + Just _files -> do + -- This is a bit noisy, so commenting it out for now. + -- Log.debug $ Array.fold + -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." diff --git a/app/src/App/CLI/Purs.purs b/app/src/App/CLI/Purs.purs index 7e8d22c90..e5706e3f1 100644 --- a/app/src/App/CLI/Purs.purs +++ b/app/src/App/CLI/Purs.purs @@ -4,6 +4,7 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record @@ -13,6 +14,16 @@ import Node.ChildProcess.Types (Exit(..)) import Node.Library.Execa as Execa import Registry.Version as Version +-- | The minimum compiler version that supports 'purs graph' +minPursGraph :: Version +minPursGraph = unsafeFromRight (Version.parse "0.14.0") + +minPursuitPublish :: Version +minPursuitPublish = unsafeFromRight (Version.parse "0.14.7") + +minLanguageCSTParser :: Version +minLanguageCSTParser = unsafeFromRight (Version.parse "0.15.0") + -- | Call a specific version of the PureScript compiler callCompiler_ :: { version :: Maybe Version, command :: PursCommand, cwd :: Maybe FilePath } -> Aff Unit callCompiler_ = void <<< callCompiler @@ -23,6 +34,22 @@ data CompilerFailure | MissingCompiler derive instance Eq 
CompilerFailure +derive instance Ord CompilerFailure + +compilerFailureCodec :: CJ.Codec CompilerFailure +compilerFailureCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError CompilerFailure + decode json = except do + map CompilationError (CJ.decode (CJ.array compilerErrorCodec) json) + <|> map UnknownError (CJ.decode CJ.string json) + <|> map (const MissingCompiler) (CJ.decode CJ.null json) + + encode :: CompilerFailure -> JSON + encode = case _ of + CompilationError errors -> CJ.encode (CJ.array compilerErrorCodec) errors + UnknownError message -> CJ.encode CJ.string message + MissingCompiler -> CJ.encode CJ.null unit type CompilerError = { position :: SourcePosition diff --git a/app/src/App/Effect/Cache.purs b/app/src/App/Effect/Cache.purs index 15808ff9d..3ea63452a 100644 --- a/app/src/App/Effect/Cache.purs +++ b/app/src/App/Effect/Cache.purs @@ -169,7 +169,6 @@ handleMemoryFs env = case _ of case inFs of Nothing -> pure $ reply Nothing Just entry -> do - Log.debug $ "Fell back to on-disk entry for " <> memory putMemoryImpl env.ref unit (Key memory (Const entry)) pure $ reply $ Just $ unCache entry Just cached -> @@ -227,8 +226,7 @@ getMemoryImpl ref (Key id (Reply reply)) = do let (unCache :: CacheValue -> b) = unsafeCoerce cache <- Run.liftEffect $ Ref.read ref case Map.lookup id cache of - Nothing -> do - Log.debug $ "No cache entry found for " <> id <> " in memory." + Nothing -> pure $ reply Nothing Just cached -> do pure $ reply $ Just $ unCache cached @@ -237,7 +235,6 @@ putMemoryImpl :: forall x r a. CacheRef -> a -> MemoryEncoding Const a x -> Run putMemoryImpl ref next (Key id (Const value)) = do let (toCache :: x -> CacheValue) = unsafeCoerce Run.liftEffect $ Ref.modify_ (Map.insert id (toCache value)) ref - Log.debug $ "Wrote cache entry for " <> id <> " in memory." pure next deleteMemoryImpl :: forall x r a. 
CacheRef -> MemoryEncoding Ignore a x -> Run (LOG + EFFECT + r) a @@ -276,7 +273,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readFile path)) >>= case _ of Left _ -> do - Log.debug $ "No cache found for " <> id <> " at path " <> path pure $ reply Nothing Right buf -> do pure $ reply $ Just buf @@ -285,7 +281,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 path)) >>= case _ of Left _ -> do - Log.debug $ "No cache file found for " <> id <> " at path " <> path pure $ reply Nothing Right content -> case JSON.parse content of Left parseError -> do @@ -308,7 +303,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as a buffer: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " as a buffer at path " <> path pure next AsJson id codec (Const value) -> do @@ -318,7 +312,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as JSON: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " at path " <> path <> " as JSON." pure next deleteFsImpl :: forall a b r. FilePath -> FsEncoding Ignore a b -> Run (LOG + AFF + r) a diff --git a/app/src/App/Effect/GitHub.purs b/app/src/App/Effect/GitHub.purs index 584832255..914a3aa92 100644 --- a/app/src/App/Effect/GitHub.purs +++ b/app/src/App/Effect/GitHub.purs @@ -242,8 +242,8 @@ request octokit githubRequest@{ route: route@(GitHubRoute method _ _), codec } = -- auto-expire cache entries. We will be behind GitHub at most this amount per repo. -- -- TODO: This 'diff' check should be removed once we have conditional requests. 
- Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 4.0 -> do - Log.debug $ "Found cache entry but it was modified more than 4 hours ago, refetching " <> printedRoute + Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 23.0 -> do + Log.debug $ "Found cache entry but it was modified more than 23 hours ago, refetching " <> printedRoute result <- requestWithBackoff octokit githubRequest Cache.put _githubCache (Request route) (result <#> \resp -> { response: CJ.encode codec resp, modified: now, etag: Nothing }) pure result @@ -265,10 +265,8 @@ requestWithBackoff octokit githubRequest = do Log.debug $ "Making request to " <> route <> " with base URL " <> githubApiUrl result <- Run.liftAff do let - retryOptions = - { timeout: defaultRetry.timeout - , retryOnCancel: defaultRetry.retryOnCancel - , retryOnFailure: \attempt err -> case err of + retryOptions = defaultRetry + { retryOnFailure = \attempt err -> case err of UnexpectedError _ -> false DecodeError _ -> false -- https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#exceeding-the-rate-limit diff --git a/app/src/App/Effect/PackageSets.purs b/app/src/App/Effect/PackageSets.purs index 5a250ba22..ccd78e1c2 100644 --- a/app/src/App/Effect/PackageSets.purs +++ b/app/src/App/Effect/PackageSets.purs @@ -428,7 +428,7 @@ validatePackageSet (PackageSet set) = do -- We can now attempt to produce a self-contained manifest index from the -- collected manifests. If this fails then the package set is not -- self-contained. - Tuple unsatisfied _ = ManifestIndex.maximalIndex (Set.fromFoldable success) + Tuple unsatisfied _ = ManifestIndex.maximalIndex ManifestIndex.IgnoreRanges (Set.fromFoldable success) -- Otherwise, we can check if we were able to produce an index from the -- package set alone, without errors. 
diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index cdd00eb1d..bd406ff25 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -252,7 +252,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Writing manifest for " <> formatted <> ":\n" <> printJson Manifest.codec manifest index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left error -> Except.throw $ Array.fold [ "Can't insert " <> formatted <> " into manifest index because it has unsatisfied dependencies:" @@ -275,7 +275,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Deleting manifest for " <> formatted index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left error -> Except.throw $ Array.fold [ "Can't delete " <> formatted <> " from manifest index because it would produce unsatisfied dependencies:" @@ -359,7 +359,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Just metadata -> do Log.debug $ "Successfully read metadata for " <> printedName <> " from path " <> path - Log.debug $ "Setting metadata cache to singleton entry (as cache was previosuly empty)." + Log.debug $ "Setting metadata cache to singleton entry (as cache was previously empty)." 
Cache.put _registryCache AllMetadata (Map.singleton name metadata) pure $ Just metadata @@ -836,8 +836,9 @@ readManifestIndexFromDisk root = do entries <- map partitionEithers $ for packages.success (ManifestIndex.readEntryFile root) case entries.fail of - [] -> case ManifestIndex.fromSet $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of + [] -> case ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of Left errors -> do + Log.debug $ "Could not read a valid manifest index from entry files: " <> Array.foldMap (Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) <<< NonEmptyArray.toArray) entries.success Except.throw $ append "Unable to read manifest index (some packages are not satisfiable): " $ Array.foldMap (append "\n - ") do Tuple name versions <- Map.toUnfoldable errors Tuple version dependency <- Map.toUnfoldable versions @@ -878,10 +879,10 @@ readAllMetadataFromDisk metadataDir = do entries <- Run.liftAff $ map partitionEithers $ for packages.success \name -> do result <- readJsonFile Metadata.codec (Path.concat [ metadataDir, PackageName.print name <> ".json" ]) - pure $ map (Tuple name) result + pure $ bimap (Tuple name) (Tuple name) result unless (Array.null entries.fail) do - Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (append "\n - ") entries.fail + Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (\(Tuple name err) -> "\n - " <> PackageName.print name <> ": " <> err) entries.fail Log.debug "Successfully read metadata entries." 
pure $ Map.fromFoldable entries.success diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index a9479d3f5..828759792 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -6,6 +6,8 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.JSDate as JSDate +import Data.String as String +import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff import Effect.Exception as Exception import Effect.Now as Now @@ -20,6 +22,7 @@ import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Location as Location @@ -28,8 +31,15 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +-- | Packages can be published via the legacy importer or a user via the API. We +-- | determine some information differently in these cases, such as the time the +-- | package was published. +data ImportType = Old | Recent + +derive instance Eq ImportType + -- | An effect for fetching package sources -data Source a = Fetch PackageSource FilePath Location String (Either String FetchedSource -> a) +data Source a = Fetch FilePath Location String (Either FetchError FetchedSource -> a) derive instance Functor Source @@ -40,27 +50,42 @@ _source = Proxy type FetchedSource = { path :: FilePath, published :: DateTime } +data FetchError + = GitHubOnly + | NoSubdir + | InaccessibleRepo Octokit.Address + | NoToplevelDir + | Fatal String + +printFetchError :: FetchError -> String +printFetchError = case _ of + GitHubOnly -> "Packages are only allowed to come from GitHub for now. See issue #15." + NoSubdir -> "Monorepos and the `subdir` key are not supported yet. See issue #16." 
+ InaccessibleRepo { owner, repo } -> "Repository located at https://github.com/" <> owner <> "/" <> repo <> ".git is inaccessible or does not exist." + NoToplevelDir -> "Downloaded tarball has no top-level directory." + Fatal err -> "Unrecoverable error. " <> err + -- | Fetch the provided location to the provided destination path. -fetch :: forall r. PackageSource -> FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource -fetch source destination location ref = Except.rethrow =<< Run.lift _source (Fetch source destination location ref identity) +fetch :: forall r. FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource +fetch destination location ref = (Except.rethrow <<< lmap printFetchError) =<< Run.lift _source (Fetch destination location ref identity) -- | Run the SOURCE effect given a handler. interpret :: forall r a. (Source ~> Run r) -> Run (SOURCE + r) a -> Run r a interpret handler = Run.interpret (Run.on _source handler Run.send) -- | Handle the SOURCE effect by downloading package source to the file system. -handle :: forall r a. Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a -handle = case _ of - Fetch source destination location ref reply -> map (map reply) Except.runExcept do +handle :: forall r a. ImportType -> Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a +handle importType = case _ of + Fetch destination location ref reply -> map (map reply) Except.runExcept do Log.info $ "Fetching " <> printJson Location.codec location case location of Git _ -> do -- TODO: Support non-GitHub packages. Remember subdir when doing so. (See #15) - Except.throw "Packages are only allowed to come from GitHub for now. See #15" + Except.throw GitHubOnly GitHub { owner, repo, subdir } -> do -- TODO: Support subdir. In the meantime, we verify subdir is not present. (See #16) - when (isJust subdir) $ Except.throw "`subdir` is not supported for now. 
See #16" + when (isJust subdir) $ Except.throw NoSubdir case pursPublishMethod of -- This needs to be removed so that we can support non-GitHub packages (#15) @@ -73,41 +98,64 @@ handle = case _ of Log.debug $ "Using legacy Git clone to fetch package source at tag: " <> show { owner, repo, ref } let - repoDir = Path.concat [ destination, repo ] - - clonePackageAtTag = do - let url = Array.fold [ "https://github.com/", owner, "/", repo ] - let args = [ "clone", url, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] - withRetryOnTimeout (Git.gitCLI args Nothing) >>= case _ of - Cancelled -> Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> url <> " " <> ref - Failed err -> Aff.throwError $ Aff.error err - Succeeded _ -> pure unit + repoDir = Path.concat [ destination, repo <> "-" <> ref ] + + -- If a git clone is cancelled by the timeout, but had partially-cloned, then it will + -- leave behind files that prevent a retry. + retryOpts = defaultRetry + { cleanupOnCancel = FS.Extra.remove repoDir + , timeout = Milliseconds 15_000.0 + } + + cloneUrl = + Array.fold [ "https://github.com/", owner, "/", repo ] + + cloneArgs = + [ "clone", cloneUrl, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] + + clonePackageAtTag = + withRetry retryOpts (Git.gitCLI cloneArgs Nothing) >>= case _ of + Cancelled -> + Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> cloneUrl <> " " <> ref + Failed err -> + Aff.throwError $ Aff.error err + Succeeded _ -> + pure unit Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of + Right _ -> Log.debug $ "Cloned package source to " <> repoDir Left error -> do + Log.warn $ "Git clone command failed:\n " <> String.joinWith " " (Array.cons "git" cloneArgs) Log.error $ "Failed to clone git tag: " <> Aff.message error - Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref - Right _ -> Log.debug $ "Cloned 
package source to " <> repoDir + + -- We'll receive this message if we try to clone a repo which doesn't + -- exist, which is interpreted as an attempt to fetch a private repo. + let missingRepoErr = "fatal: could not read Username for 'https://github.com': terminal prompts disabled" + + if String.contains (String.Pattern missingRepoErr) (Aff.message error) then + Except.throw $ InaccessibleRepo { owner, repo } + else + Except.throw $ Fatal $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." let - getRefTime = case source of - LegacyPackage -> do - timestamp <- Except.rethrow =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) + getRefTime = case importType of + Old -> do + timestamp <- (Except.rethrow <<< lmap Fatal) =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) jsDate <- Run.liftEffect $ JSDate.parse timestamp dateTime <- case JSDate.toDateTime jsDate of - Nothing -> Except.throw $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate + Nothing -> Except.throw $ Fatal $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate Just parsed -> pure parsed pure dateTime - CurrentPackage -> + Recent -> Run.liftEffect Now.nowDateTime -- Cloning will result in the `repo` name as the directory name publishedTime <- Except.runExcept getRefTime >>= case _ of Left error -> do - Log.error $ "Failed to get published time: " <> error - Except.throw $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." + Log.error $ "Failed to get published time. 
" <> printFetchError error + Except.throw $ Fatal $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." Right value -> pure value pure { path: repoDir, published: publishedTime } @@ -122,12 +170,12 @@ handle = case _ of commit <- GitHub.getRefCommit { owner, repo } (RawVersion ref) >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at ref " <> ref <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref + Except.throw $ Fatal $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref Right result -> pure result GitHub.getCommitDate { owner, repo } commit >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at commit " <> commit <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref + Except.throw $ Fatal $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref Right a -> pure a let tarballName = ref <> ".tar.gz" @@ -139,16 +187,16 @@ handle = case _ of Run.liftAff $ Fetch.withRetryRequest archiveUrl {} case response of - Cancelled -> Except.throw $ "Could not download " <> archiveUrl + Cancelled -> Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.FetchError error) -> do Log.error $ "Failed to download " <> archiveUrl <> " because of an HTTP error: " <> Exception.message error - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do arrayBuffer <- Run.liftAff arrayBufferAff buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) Log.error $ "Failed 
to download " <> archiveUrl <> " because of a non-200 status code (" <> show status <> ") with body " <> bodyString - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Succeeded { arrayBuffer: arrayBufferAff } -> do arrayBuffer <- Run.liftAff arrayBufferAff Log.debug $ "Successfully downloaded " <> archiveUrl <> " into a buffer." @@ -156,14 +204,14 @@ handle = case _ of Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of Left error -> do Log.error $ "Downloaded " <> archiveUrl <> " but failed to write it to the file at path " <> absoluteTarballPath <> ":\n" <> Aff.message error - Except.throw $ "Could not download " <> archiveUrl <> " due to an internal error." + Except.throw $ Fatal $ "Could not download " <> archiveUrl <> " due to an internal error." Right _ -> Log.debug $ "Tarball downloaded to " <> absoluteTarballPath Log.debug "Verifying tarball..." Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of Nothing -> - Except.throw "Downloaded tarball from GitHub has no top-level directory." + Except.throw NoToplevelDir Just path -> do Log.debug "Extracting the tarball..." 
Tar.extract { cwd: destination, archive: tarballName } diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 2c02604c4..56422ab64 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -58,7 +58,7 @@ main = launchAff_ $ do Right packageOperation -> case packageOperation of Publish payload -> - API.publish CurrentPackage payload + API.publish Nothing payload Authenticated payload -> do -- If we receive an authenticated operation via GitHub, then we -- re-sign it with pacchettibotti credentials if and only if the @@ -98,10 +98,11 @@ main = launchAff_ $ do # Registry.interpret (Registry.handle registryEnv) # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret Source.handle + # Source.interpret (Source.handle Source.Recent) # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) -- Caching & logging # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) # Log.interpret (Log.handleTerminal Verbose) diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 7788b16c2..7197a6001 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -11,7 +11,6 @@ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Either as Either import Data.Exists as Exists -import Data.FunctorWithIndex (mapWithIndex) import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Ord.Max (Max(..)) @@ -38,7 +37,7 @@ import Registry.App.Legacy.LenientRange as LenientRange import 
Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (LegacyPackageSet(..), LegacyPackageSetEntry, LegacyPackageSetUnion, RawPackageName(..), RawVersion(..), RawVersionRange(..), legacyPackageSetCodec, legacyPackageSetUnionCodec, rawPackageNameMapCodec, rawVersionCodec, rawVersionRangeCodec) -import Registry.Foreign.Octokit (Address, GitHubError) +import Registry.Foreign.Octokit (Address, GitHubError(..)) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp import Registry.License as License @@ -61,7 +60,8 @@ type LegacyManifest = } toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location { license, description, dependencies } = do +toManifest name version location legacy = do + let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing @@ -140,21 +140,13 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr Left bowerError, Left _ -> Left bowerError Right bowerDeps, Left _ -> Right bowerDeps Left _, Right spagoDeps -> Right spagoDeps - Right bowerDeps, Right spagoDeps -> Right do - bowerDeps # mapWithIndex \package range -> - case Map.lookup package spagoDeps of - Nothing -> range - Just spagoRange -> Range.union range spagoRange + Right bowerDeps, Right spagoDeps -> Right $ Map.unionWith Range.union bowerDeps spagoDeps unionPackageSets = case maybePackageSetDeps, unionManifests of Nothing, Left manifestError -> Left manifestError Nothing, Right manifestDeps -> Right manifestDeps Just packageSetDeps, Left _ -> Right packageSetDeps - Just packageSetDeps, Right manifestDeps -> Right do - packageSetDeps # mapWithIndex \package range -> - case Map.lookup package manifestDeps of - Nothing -> range - Just manifestRange -> Range.union range manifestRange + Just 
packageSetDeps, Right manifestDeps -> Right $ Map.unionWith Range.union manifestDeps packageSetDeps Run.Except.rethrowAt _legacyManifestError unionPackageSets @@ -173,6 +165,44 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr pure { license, dependencies, description } +-- | Some legacy manifests must be patched to be usable. +patchLegacyManifest :: PackageName -> Version -> LegacyManifest -> LegacyManifest +patchLegacyManifest name version legacy = do + let bolson = unsafeFromRight (PackageName.parse "bolson") + let hyrule = unsafeFromRight (PackageName.parse "hyrule") + + let unsafeVersion = unsafeFromRight <<< Version.parse + let unsafeRange a b = unsafeFromJust (Range.mk (unsafeVersion a) (unsafeVersion b)) + let fixRange pkg range = Map.update (\_ -> Just range) pkg + + -- hyrule v2.2.0 removes a module that breaks all versions of bolson + -- prior to the versions below + let earlyHyruleFixedRange = unsafeRange "1.6.4" "2.2.0" + let earlyFixHyrule = fixRange hyrule earlyHyruleFixedRange + + -- hyrule v2.4.0 removes a module that breaks all versions of bolson, deku, + -- and rito prior to the versions below + let hyruleFixedRange = unsafeRange "2.0.0" "2.4.0" + let fixHyrule = fixRange hyrule hyruleFixedRange + + -- bolson v0.3.1 changes the type of a function that breaks deku until 0.9.21 + let bolsonFixedRange = unsafeRange "0.1.0" "0.3.2" + let fixBolson = fixRange bolson bolsonFixedRange + + case PackageName.print name of + "bolson" + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.4.0" -> legacy { dependencies = fixHyrule legacy.dependencies } + "deku" + | version < unsafeVersion "0.7.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.9.21" -> legacy { dependencies = fixBolson (fixHyrule legacy.dependencies) } + | version < unsafeVersion "0.9.25" -> legacy { dependencies = fixHyrule 
legacy.dependencies } + "rito" + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.3.5" -> legacy { dependencies = fixHyrule legacy.dependencies } + _ -> + legacy + _legacyManifestError :: Proxy "legacyManifestError" _legacyManifestError = Proxy @@ -224,16 +254,22 @@ fetchLegacyManifestFiles :: forall r . Address -> RawVersion - -> Run (GITHUB + LOG + AFF + EFFECT + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson)) + -> Run (GITHUB + LOG + AFF + EFFECT + EXCEPT String + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson)) fetchLegacyManifestFiles address ref = do eitherBower <- fetchBowerfile address ref - void $ flip ltraverse eitherBower \error -> - Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error + void $ flip ltraverse eitherBower case _ of + APIError { statusCode } | statusCode == 401 -> + Except.throw "Permission error on token used to fetch manifests!" + error -> + Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error eitherSpago <- fetchSpagoDhallJson address ref - void $ flip ltraverse eitherSpago \error -> - Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error + void $ flip ltraverse eitherSpago case _ of + APIError { statusCode } | statusCode == 401 -> + Except.throw "Permission error on token used to fetch manifests!" + error -> + Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error pure $ case eitherBower, eitherSpago of - Left _, Left _ -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available." 
} + Left errL, Left errR -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available: " <> Octokit.printGitHubError errL <> ", " <> Octokit.printGitHubError errR } Right bower, Left _ -> Right $ This bower Left _, Right spago -> Right $ That spago Right bower, Right spago -> Right $ Both bower spago diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index 311a15aa5..7a046414d 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -1,6 +1,5 @@ module Registry.App.Prelude ( LogVerbosity(..) - , PackageSource(..) , PursPublishMethod(..) , Retry , RetryResult(..) @@ -23,7 +22,6 @@ module Registry.App.Prelude , parseYaml , partitionEithers , printJson - , printPackageSource , pursPublishMethod , readJsonFile , readYamlFile @@ -173,7 +171,9 @@ withRetryOnTimeout = withRetry defaultRetry type Retry err = { timeout :: Aff.Milliseconds + , cleanupOnCancel :: Extra.Aff Unit , retryOnCancel :: Int -> Boolean + , cleanupOnFailure :: err -> Extra.Aff Unit , retryOnFailure :: Int -> err -> Boolean } @@ -182,7 +182,9 @@ type Retry err = defaultRetry :: forall err. Retry err defaultRetry = { timeout: Aff.Milliseconds 5000.0 + , cleanupOnCancel: pure unit , retryOnCancel: \attempt -> attempt <= 3 + , cleanupOnFailure: \_ -> pure unit , retryOnFailure: \_ _ -> false } @@ -196,7 +198,7 @@ derive instance (Eq err, Eq a) => Eq (RetryResult err a) -- | Attempt an effectful computation that can fail by specifying how to retry -- | the request and whether it should time out. withRetry :: forall err a. 
Retry err -> Extra.Aff (Either.Either err a) -> Extra.Aff (RetryResult err a) -withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } action = do +withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure, cleanupOnCancel, cleanupOnFailure } action = do let runAction :: Extra.Aff (Either.Either err a) -> Int -> Extra.Aff (RetryResult err a) runAction action' ms = do @@ -217,14 +219,18 @@ withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } a Cancelled -> if retryOnCancel attempt then do let newTimeout = Int.floor timeout `Int.pow` (attempt + 1) + cleanupOnCancel retry (attempt + 1) =<< runAction action newTimeout - else + else do + cleanupOnCancel pure Cancelled Failed err -> if retryOnFailure attempt err then do let newTimeout = Int.floor timeout `Int.pow` (attempt + 1) + cleanupOnFailure err retry (attempt + 1) =<< runAction action newTimeout - else + else do + cleanupOnFailure err pure (Failed err) Succeeded result -> pure (Succeeded result) @@ -255,15 +261,3 @@ data PursPublishMethod = LegacyPursPublish | PursPublish -- | The current purs publish method pursPublishMethod :: PursPublishMethod pursPublishMethod = LegacyPursPublish - --- | Operations can be exercised for old, pre-registry packages, or for packages --- | which are on the 0.15 compiler series. If a true legacy package is uploaded --- | then we do not require compilation to succeed and we don't publish docs. 
-data PackageSource = LegacyPackage | CurrentPackage - -derive instance Eq PackageSource - -printPackageSource :: PackageSource -> String -printPackageSource = case _ of - LegacyPackage -> "legacy" - CurrentPackage -> "current" diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index 659b4ad8a..64e1bcc86 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -19,6 +19,7 @@ import Node.Process as Process import Record as Record import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) import Registry.API.V1 as V1 +import Registry.App.API (COMPILER_CACHE, _compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) @@ -68,7 +69,7 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish forkPipelineJob publish.name publish.ref PublishJob \jobId -> do Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish CurrentPackage publish + API.publish Nothing publish Unpublish, Post -> do auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body @@ -216,7 +217,7 @@ createServerEnv = do , jobId: Nothing } -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response runServer env router' request = do @@ -318,9 +319,10 @@ runEffects env operation = Aff.attempt do ) # Pursuit.interpret (Pursuit.handleAff env.vars.token) # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: 
env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret Source.handle + # Source.interpret (Source.handle Source.Recent) # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) # Except.catch ( \msg -> do finishedAt <- nowUTC diff --git a/app/src/Fetch/Retry.purs b/app/src/Fetch/Retry.purs index 4260f6e46..cd182385a 100644 --- a/app/src/Fetch/Retry.purs +++ b/app/src/Fetch/Retry.purs @@ -43,10 +43,8 @@ withRetryRequest url opts = withRetry retry do if response.status >= 400 then Left $ StatusError response else Right response - retry = - { timeout: defaultRetry.timeout - , retryOnCancel: defaultRetry.retryOnCancel - , retryOnFailure: \attempt -> case _ of + retry = defaultRetry + { retryOnFailure = \attempt -> case _ of FetchError _ -> false StatusError { status } -> -- We retry on 500-level errors in case the server is temporarily diff --git a/app/test/App/API.purs b/app/test/App/API.purs index faf4df3a3..fca1f14c0 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -9,6 +9,7 @@ import Data.Set as Set import Data.String as String import Data.String.NonEmpty as NonEmptyString import Effect.Aff as Aff +import Effect.Class.Console as Console import Effect.Ref as Ref import Node.FS.Aff as FS.Aff import Node.Path as Path @@ -27,8 +28,10 @@ import Registry.Foreign.FastGlob as FastGlob import Registry.Foreign.Tmp as Tmp import Registry.Internal.Codec as Internal.Codec import Registry.Manifest as Manifest +import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName import Registry.Range as Range +import Registry.Solver as Solver import Registry.Test.Assert as Assert import Registry.Test.Assert.Run as Assert.Run import Registry.Test.Utils as Utils @@ -57,22 +60,21 @@ spec = do removeIgnoredTarballFiles 
copySourceFiles - Spec.describe "Parses installed paths" do - Spec.it "Parses install path /my-package-1.0.0/..." do - tmp <- Tmp.mkTmpDir - let moduleA = Path.concat [ tmp, "my-package-1.0.0", "src", "ModuleA.purs" ] - case API.parseInstalledModulePath { prefix: tmp, path: moduleA } of - Left err -> Assert.fail $ "Expected to parse " <> moduleA <> " but got error: " <> err - Right { name, version } -> do - Assert.shouldEqual name (Utils.unsafePackageName "my-package") - Assert.shouldEqual version (Utils.unsafeVersion "1.0.0") - FS.Extra.remove tmp - Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do + logs <- liftEffect (Ref.new []) + let + toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry + toLegacyIndex = + Solver.exploreAllTransitiveDependencies + <<< Solver.initializeRegistry + <<< map (map (_.dependencies <<< un Manifest)) + <<< ManifestIndex.toMap + testEnv = { workdir + , logs , index , metadata , pursuitExcludes: Set.singleton (Utils.unsafePackageName "type-equality") @@ -81,7 +83,7 @@ spec = do , github: githubDir } - Assert.Run.runTestEffects testEnv do + result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do -- We'll publish effect@4.0.0 from the fixtures/github-packages -- directory, which has an unnecessary dependency on 'type-equality' -- inserted into it. @@ -90,7 +92,7 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref @@ -98,7 +100,8 @@ spec = do } -- First, we publish the package. 
- API.publish CurrentPackage publishArgs + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -127,9 +130,24 @@ spec = do , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies ] + -- We should verify the resulting metadata file is correct + Metadata effectMetadata <- Registry.readMetadata name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print name <> " to be in metadata." + Just m -> pure m + + case Map.lookup version effectMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." + Just published -> case published.compilers of + Left one -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix but unfinished single version: " <> Version.print one + Right many -> do + let many' = NonEmptyArray.toArray many + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + -- Finally, we can verify that publishing the package again should fail -- since it already exists. - Except.runExcept (API.publish CurrentPackage publishArgs) >>= case _ of + Except.runExcept (API.publish Nothing publishArgs) >>= case _ of Left _ -> pure unit Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail." @@ -138,14 +156,66 @@ spec = do -- but did not have documentation make it to Pursuit. 
let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" , resolutions: Nothing } - API.publish CurrentPackage pursuitOnlyPublishArgs + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + + -- We can also verify that transitive dependencies are added for legacy + -- packages. + let + transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } + transitivePublishArgs = + { compiler: Utils.unsafeVersion "0.15.4" + , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } + , name: transitive.name + , ref: "v" <> Version.print transitive.version + , resolutions: Nothing + } + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + + -- We should verify the resulting metadata file is correct + Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in metadata." + Just m -> pure m + + case Map.lookup transitive.version transitiveMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." 
+ Just published -> case published.compilers of + Left one -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix but unfinished single version: " <> Version.print one + Right many -> do + let many' = NonEmptyArray.toArray many + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + + Registry.readManifest transitive.name transitive.version >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in manifest index." + Just (Manifest manifest) -> do + let expectedDeps = Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeRange ">=6.0.0 <7.0.0") + when (manifest.dependencies /= expectedDeps) do + Except.throw $ String.joinWith "\n" + [ "Expected transitive@1.0.0 to have dependencies" + , printJson (Internal.Codec.packageMap Range.codec) expectedDeps + , "\nbut got" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + ] + case result of + Left exn -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Got an Aff exception! 
" <> Aff.message exn + Right (Left err) -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err + Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do @@ -207,7 +277,7 @@ checkBuildPlanToResolutions = do Spec.it "buildPlanToResolutions produces expected resolutions file format" do Assert.shouldEqual generatedResolutions expectedResolutions where - dependenciesDir = "testDir" + installedResolutions = "testDir" resolutions = Map.fromFoldable [ Tuple (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "1.0.0") @@ -218,14 +288,14 @@ checkBuildPlanToResolutions = do generatedResolutions = API.formatPursuitResolutions { resolutions - , dependenciesDir + , installedResolutions } expectedResolutions = Map.fromFoldable do packageName /\ version <- (Map.toUnfoldable resolutions :: Array _) let bowerName = RawPackageName ("purescript-" <> PackageName.print packageName) - path = Path.concat [ dependenciesDir, PackageName.print packageName <> "-" <> Version.print version ] + path = Path.concat [ installedResolutions, PackageName.print packageName <> "-" <> Version.print version ] pure $ Tuple bowerName { path, version } removeIgnoredTarballFiles :: Spec.Spec Unit diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 8e8207974..e3279f68b 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,6 +2,7 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude +import Data.Array.NonEmpty as NonEmptyArray import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map @@ -96,7 +97,7 @@ convertedPackageSet = Left err -> unsafeCrashWith err Right value -> value where - index = unsafeFromRight $ 
ManifestIndex.fromSet $ Set.fromFoldable + index = unsafeFromRight $ ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable [ mkManifest assert [ console, effect, prelude ] , mkManifest console [ effect, prelude ] , mkManifest effect [ prelude ] @@ -208,6 +209,7 @@ unsafeMetadataEntry (Tuple name version) = do { ref: LenientVersion.raw version , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" , bytes: 0.0 + , compilers: Right (NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2")) , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom } diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 2eaca689d..42cc7d6ab 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -11,17 +11,22 @@ module Registry.Test.Assert.Run import Registry.App.Prelude import Data.Array as Array +import Data.Exists as Exists import Data.Foldable (class Foldable) import Data.Foldable as Foldable import Data.FunctorWithIndex (mapWithIndex) import Data.Map as Map import Data.Set as Set import Data.String as String +import Dodo as Dodo import Effect.Aff as Aff import Effect.Now as Now import Effect.Ref as Ref import Node.FS.Aff as FS.Aff import Node.Path as Path +import Registry.API.V1 (LogLevel) +import Registry.App.API (COMPILER_CACHE) +import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache @@ -39,7 +44,7 @@ import Registry.App.Effect.Pursuit (PURSUIT, Pursuit(..)) import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry (REGISTRY, Registry(..)) import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE, Source(..)) +import Registry.App.Effect.Source (FetchError(..), SOURCE, Source(..)) import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE, Storage) import Registry.App.Effect.Storage as 
Storage @@ -83,6 +88,7 @@ type TEST_EFFECTS = + RESOURCE_ENV + GITHUB_CACHE + LEGACY_CACHE + + COMPILER_CACHE + COMMENT + LOG + EXCEPT String @@ -93,6 +99,7 @@ type TEST_EFFECTS = type TestEnv = { workdir :: FilePath + , logs :: Ref (Array (Tuple LogLevel String)) , metadata :: Ref (Map PackageName Metadata) , index :: Ref ManifestIndex , pursuitExcludes :: Set PackageName @@ -101,8 +108,8 @@ type TestEnv = , username :: String } -runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff a -runTestEffects env operation = do +runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff (Either Aff.Error a) +runTestEffects env operation = Aff.attempt do resourceEnv <- Env.lookupResourceEnv githubCache <- liftEffect Cache.newCacheRef legacyCache <- liftEffect Cache.newCacheRef @@ -118,18 +125,19 @@ runTestEffects env operation = do # Env.runPacchettiBottiEnv { publicKey: "Unimplemented", privateKey: "Unimplemented" } # Env.runResourceEnv resourceEnv -- Caches + # runCompilerCacheMock # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects # Comment.interpret Comment.handleLog - # Log.interpret (\(Log _ _ next) -> pure next) + # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) # Run.runBaseAff' -- | For testing simple Run functions that don't need the whole environment. runBaseEffects :: forall a. Run (LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff a -runBaseEffects = +runBaseEffects = do Log.interpret (\(Log _ _ next) -> pure next) -- Base effects >>> Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) @@ -141,6 +149,22 @@ runLegacyCacheMemory = Cache.interpret Legacy.Manifest._legacyCache <<< Cache.ha runGitHubCacheMemory :: forall r a. 
CacheRef -> Run (GITHUB_CACHE + LOG + EFFECT + r) a -> Run (LOG + EFFECT + r) a runGitHubCacheMemory = Cache.interpret GitHub._githubCache <<< Cache.handleMemory +runCompilerCacheMock :: forall r a. Run (COMPILER_CACHE + LOG + r) a -> Run (LOG + r) a +runCompilerCacheMock = Cache.interpret API._compilerCache case _ of + Cache.Get key -> Exists.runExists getImpl (Cache.encodeFs key) + Cache.Put _ next -> pure next + Cache.Delete key -> Exists.runExists deleteImpl (Cache.encodeFs key) + where + getImpl :: forall x z. Cache.FsEncoding Cache.Reply x z -> Run _ x + getImpl = case _ of + Cache.AsBuffer _ (Cache.Reply reply) -> pure $ reply Nothing + Cache.AsJson _ _ (Cache.Reply reply) -> pure $ reply Nothing + + deleteImpl :: forall x z. Cache.FsEncoding Cache.Ignore x z -> Run _ x + deleteImpl = case _ of + Cache.AsBuffer _ (Cache.Ignore next) -> pure next + Cache.AsJson _ _ (Cache.Ignore next) -> pure next + type PursuitMockEnv = { excludes :: Set PackageName , metadataRef :: Ref (Map PackageName Metadata) @@ -179,7 +203,7 @@ handleRegistryMock env = case _ of WriteManifest manifest reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left err -> pure $ reply $ Left $ "Failed to insert manifest:\n" <> Utils.unsafeStringify manifest <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) @@ -187,7 +211,7 @@ handleRegistryMock env = case _ of DeleteManifest name version reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left err -> pure $ reply $ Left $ "Failed to delete entry for :\n" <> Utils.formatPackageVersion name version <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) 
@@ -282,11 +306,11 @@ type SourceMockEnv = { github :: FilePath } handleSourceMock :: forall r a. SourceMockEnv -> Source a -> Run (EXCEPT String + AFF + EFFECT + r) a handleSourceMock env = case _ of - Fetch _source destination location ref reply -> do + Fetch destination location ref reply -> do now <- Run.liftEffect Now.nowDateTime case location of - Git _ -> pure $ reply $ Left "Packages cannot be published from Git yet (only GitHub)." - GitHub { subdir } | isJust subdir -> pure $ reply $ Left "Packages cannot use the 'subdir' key yet." + Git _ -> pure $ reply $ Left GitHubOnly + GitHub { subdir } | isJust subdir -> pure $ reply $ Left NoSubdir GitHub { repo } -> do let name = stripPureScriptPrefix repo @@ -295,7 +319,7 @@ handleSourceMock env = case _ of localPath = Path.concat [ env.github, dirname ] destinationPath = Path.concat [ destination, dirname <> "-checkout" ] Run.liftAff (Aff.attempt (FS.Aff.stat localPath)) >>= case _ of - Left _ -> pure $ reply $ Left $ "Cannot copy " <> localPath <> " because it does not exist." + Left _ -> pure $ reply $ Left $ Fatal $ "Cannot copy " <> localPath <> " because it does not exist." Right _ -> do Run.liftAff $ FS.Extra.copy { from: localPath, to: destinationPath, preserveTimestamps: true } case pursPublishMethod of diff --git a/flake.nix b/flake.nix index 7784bce83..191561aa5 100644 --- a/flake.nix +++ b/flake.nix @@ -70,6 +70,11 @@ # (typically >4GB), and source packgaes really ought not be shipping large # files — just source code. GIT_LFS_SKIP_SMUDGE = 1; + + # We disable git from entering interactive mode at any time, as there is no + # one there to answer prompts. + GIT_TERMINAL_PROMPT = 0; + registryOverlay = final: prev: rec { nodejs = prev.nodejs_20; @@ -284,7 +289,12 @@ # according to the env.example file, or to the values explicitly set below # (e.g. DHALL_PRELUDE and DHALL_TYPES). 
defaultEnv = parseEnv ./.env.example // { - inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + inherit + DHALL_PRELUDE + DHALL_TYPES + GIT_LFS_SKIP_SMUDGE + GIT_TERMINAL_PROMPT + ; }; # Parse a .env file, skipping empty lines and comments, into Nix attrset @@ -797,7 +807,7 @@ # Then we poll for job results, expecting an eventual 'success'. try_count = 0 - delay_seconds = 3 + delay_seconds = 5 prev_timestamp = "2023-07-29T00:00:00.000Z" log_level = "DEBUG" while True: @@ -809,7 +819,12 @@ success = poll_result['success'] assert success, f"GET /jobs/{job_id} should return success, but it returned {poll_result}" break - elif (try_count * delay_seconds) > 60: + + # A fairly long timeout because of the requirement to compile packages. + # FIXME: once this is split into multiple jobs, the timeout can be adjusted + # to a smaller number, e.g. 45 seconds maximum, but we can allow extra time + # for the subsequent compilation jobs to complete. + elif (try_count * delay_seconds) > 180: raise ValueError(f"Cancelling publish request after {try_count * delay_seconds} seconds, this is too long...") else: print(f"Job is still ongoing, retrying in {delay_seconds} seconds...") @@ -821,7 +836,7 @@ devShells = { default = pkgs.mkShell { - inherit GIT_LFS_SKIP_SMUDGE; + inherit GIT_LFS_SKIP_SMUDGE GIT_TERMINAL_PROMPT; name = "registry-dev"; packages = with pkgs; [ diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index c0258b096..d7787466b 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -207,12 +207,17 @@ getCommitDateRequest { address, commitSha } = , headers: Object.empty , args: noArgs , paginate: false - , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "Commit" $ CJ.Record.object - { committer: CJ.Record.object { date: Internal.Codec.iso8601DateTime } } + , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "CommitData" $ CJ.Record.object + { data: CJ.named "Commit" $ CJ.Record.object + { 
committer: CJ.named "Commit.committer" $ CJ.Record.object + { date: Internal.Codec.iso8601DateTime + } + } + } } where - toJsonRep date = { committer: { date } } - fromJsonRep = _.committer.date + toJsonRep date = { data: { committer: { date } } } + fromJsonRep = _.data.committer.date -- | Create a comment on an issue. Requires authentication. -- | https://github.com/octokit/plugin-rest-endpoint-methods.js/blob/v5.16.0/docs/issues/createComment.md diff --git a/foreign/src/Foreign/Tmp.js b/foreign/src/Foreign/Tmp.js index b11d10232..8995afdfc 100644 --- a/foreign/src/Foreign/Tmp.js +++ b/foreign/src/Foreign/Tmp.js @@ -3,6 +3,6 @@ import { setGracefulCleanup, dirSync } from "tmp"; setGracefulCleanup(); export const mkTmpDirImpl = () => { - const tmpobj = dirSync(); + const tmpobj = dirSync({ template: 'XXXXXX' }); return tmpobj.name; }; diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index c867b5d9b..4837b49ed 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -103,25 +103,18 @@ lookup name version (ManifestIndex index) = -- | Insert a new manifest into the manifest index, failing if the manifest -- | indicates dependencies that cannot be satisfied. Dependencies are not -- | satisfied if the package is not in the index. 
-insert :: Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex -insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do +insert :: IncludeRanges -> Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex +insert consider manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do let unsatisfied :: Map PackageName Range unsatisfied = Map.fromFoldable do Tuple dependency range <- Map.toUnfoldable dependencies case Map.lookup dependency index of - Just _versions -> - -- Ideally we would enforce that inserting a manifest requires that - -- at least one version exists in the index in the given range already - -- Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> - -- - -- However, to be somewhat lenient on what packages can be admitted to - -- the official index, we just look to see the package name exists. - -- - -- Note that if we _do_ add this check later on, we will need to - -- produce an alternate version that does not check version bounds for - -- use in validatiing package sets, ie. 'maximalIndexIgnoringBounds' - [] + Just versions -> case consider of + IgnoreRanges -> [] + ConsiderRanges + | Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> [] + | otherwise -> [ Tuple dependency range ] _ -> [ Tuple dependency range ] @@ -137,12 +130,12 @@ insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) -- | package names (and not package versions), it is always acceptable to delete -- | a package version so long as it has at least 2 versions. However, removing -- | a package altogether incurs a full validation check. 
-delete :: PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -delete name version (ManifestIndex index) = do +delete :: IncludeRanges -> PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +delete consider name version (ManifestIndex index) = do case Map.lookup name index of Nothing -> pure (ManifestIndex index) Just versionsMap | Map.size versionsMap == 1 -> - fromSet $ Set.fromFoldable do + fromSet consider $ Set.fromFoldable do Tuple _ versions <- Map.toUnfoldableUnordered (Map.delete name index) Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] @@ -151,21 +144,21 @@ delete name version (ManifestIndex index) = do -- | Convert a set of manifests into a `ManifestIndex`. Reports all failures -- | encountered rather than short-circuiting. -fromSet :: Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -fromSet manifests = do - let Tuple failed index = maximalIndex manifests +fromSet :: IncludeRanges -> Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +fromSet consider manifests = do + let Tuple failed index = maximalIndex consider manifests if Map.isEmpty failed then Right index else Left failed -- | Produce the maximal `ManifestIndex` possible for the given set of -- | `Manifest`s, collecting failures along the way. 
-maximalIndex :: Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -maximalIndex manifests = do +maximalIndex :: IncludeRanges -> Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +maximalIndex consider manifests = do let - insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert manifest index of + insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert consider manifest index of Left errors -> Tuple (Map.insertWith Map.union name (Map.singleton version errors) failed) index Right newIndex -> Tuple failed newIndex - Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort IgnoreRanges manifests) + Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort consider manifests) data IncludeRanges = ConsiderRanges diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index 62fe3c5e8..5079baf0a 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,15 +20,20 @@ module Registry.Metadata import Prelude +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) +import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location @@ -37,6 +42,7 @@ import Registry.Owner as Owner import Registry.Sha256 (Sha256) import Registry.Sha256 as Sha256 import Registry.Version (Version) +import Registry.Version as Version -- | A record of all published and unpublished versions of a package, along with -- | the last-used location and any owners 
(public keys) authorized to take @@ -67,18 +73,34 @@ codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object -- | not rely on its presence! type PublishedMetadata = { bytes :: Number + , compilers :: Either Version (NonEmptyArray Version) , hash :: Sha256 , publishedTime :: DateTime + + -- UNSPECIFIED: Will be removed in the future. , ref :: String } publishedMetadataCodec :: CJ.Codec PublishedMetadata publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object { bytes: CJ.number + , compilers: compilersCodec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime , ref: CJ.string } + where + compilersCodec :: CJ.Codec (Either Version (NonEmptyArray Version)) + compilersCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError (Either Version (NonEmptyArray Version)) + decode json = except do + map Left (CJ.decode Version.codec json) + <|> map Right (CJ.decode (CJ.Common.nonEmptyArray Version.codec) json) + + encode = case _ of + Left version -> CJ.encode Version.codec version + Right versions -> CJ.encode (CJ.Common.nonEmptyArray Version.codec) versions -- | Metadata about an unpublished package version. 
type UnpublishedMetadata = diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs index 0dc31e283..c842145d9 100644 --- a/lib/src/Operation/Validation.purs +++ b/lib/src/Operation/Validation.purs @@ -5,10 +5,10 @@ import Prelude import Data.Array as Array import Data.Array.NonEmpty (NonEmptyArray) import Data.Array.NonEmpty as NEA +import Data.Bifunctor as Bifunctor import Data.DateTime (DateTime) import Data.DateTime as DateTime import Data.Either (Either(..)) -import Data.List.NonEmpty (NonEmptyList) import Data.Map (Map) import Data.Map as Map import Data.Maybe (Maybe(..), maybe) @@ -20,7 +20,7 @@ import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.Time.Duration (Hours(..)) import Data.Traversable (traverse) -import Data.Tuple (Tuple(..), uncurry) +import Data.Tuple (Tuple(..), snd, uncurry) import Data.Tuple.Nested (type (/\), (/\)) import Effect.Aff as Aff import Effect.Aff.Class (class MonadAff, liftAff) @@ -32,14 +32,15 @@ import PureScript.CST.Errors as CST.Errors import PureScript.CST.Types as CST.Types import Registry.Location (Location) import Registry.Manifest (Manifest(..)) -import Registry.ManifestIndex (ManifestIndex) -import Registry.ManifestIndex as ManifestIndex import Registry.Metadata (Metadata(..), PublishedMetadata, UnpublishedMetadata) import Registry.Operation (PublishData) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.PursGraph (AssociatedError, ModuleName, PursGraph) +import Registry.PursGraph as PursGraph import Registry.Range (Range) import Registry.Range as Range +import Registry.Solver (CompilerIndex) import Registry.Solver as Solver import Registry.Version (Version) @@ -71,11 +72,63 @@ isNotUnpublished :: Manifest -> Metadata -> Maybe UnpublishedMetadata isNotUnpublished (Manifest { version }) (Metadata { unpublished }) = Map.lookup version unpublished +data ValidateDepsError + = UnusedDependencies (NonEmptySet PackageName) 
+ | MissingDependencies (NonEmptySet PackageName) + | UnusedAndMissing { unused :: NonEmptySet PackageName, missing :: NonEmptySet PackageName } + +derive instance Eq ValidateDepsError + +printValidateDepsError :: ValidateDepsError -> String +printValidateDepsError = case _ of + UnusedDependencies unused -> + "Unused dependencies (" <> printPackages unused <> ")" + MissingDependencies missing -> + "Missing dependencies (" <> printPackages missing <> ")" + UnusedAndMissing { unused, missing } -> + "Unused dependencies (" <> printPackages unused <> ") and missing dependencies (" <> printPackages missing <> ")" + where + printPackages :: NonEmptySet PackageName -> String + printPackages = String.joinWith ", " <<< map PackageName.print <<< NonEmptySet.toUnfoldable + +-- | Verifies that the manifest lists dependencies imported in the source code, +-- | no more (ie. unused) and no less (ie. transitive). The graph passed to this +-- | function should be the output of 'purs graph' executed on the 'output' +-- | directory of the package compiled with its dependencies. +noTransitiveOrMissingDeps :: Manifest -> PursGraph -> (FilePath -> Either String PackageName) -> Either (Either (NonEmptyArray AssociatedError) ValidateDepsError) Unit +noTransitiveOrMissingDeps (Manifest manifest) graph parser = do + associated <- Bifunctor.lmap Left $ PursGraph.associateModules parser graph + + let + packageModules :: Set ModuleName + packageModules = Map.keys $ Map.filter (_ == manifest.name) associated + + directImportModules :: Set ModuleName + directImportModules = PursGraph.directDependenciesOf packageModules graph + + directImportPackages :: Set PackageName + directImportPackages = Set.mapMaybe (flip Map.lookup associated) directImportModules + + -- Unused packages are those which are listed in the manifest dependencies + -- but which are not imported by the package source code. 
+ unusedDependencies :: Set PackageName + unusedDependencies = Set.filter (not <<< flip Set.member directImportPackages) (Map.keys manifest.dependencies) + + -- Missing packages are those which are imported by the package source code + -- but which are not listed in its dependencies. + missingDependencies :: Set PackageName + missingDependencies = Set.filter (not <<< flip Map.member manifest.dependencies) directImportPackages + + case NonEmptySet.fromSet unusedDependencies, NonEmptySet.fromSet missingDependencies of + Nothing, Nothing -> Right unit + Just unused, Nothing -> Left $ Right $ UnusedDependencies unused + Nothing, Just missing -> Left $ Right $ MissingDependencies missing + Just unused, Just missing -> Left $ Right $ UnusedAndMissing { unused, missing } + -- | Verifies that the manifest dependencies are solvable by the registry solver. -validateDependenciesSolve :: Manifest -> ManifestIndex -> Either (NonEmptyList Solver.SolverError) (Map PackageName Version) -validateDependenciesSolve manifest manifestIndex = do - let getDependencies = _.dependencies <<< un Manifest - Solver.solve (map (map getDependencies) (ManifestIndex.toMap manifestIndex)) (getDependencies manifest) +validateDependenciesSolve :: Version -> Manifest -> CompilerIndex -> Either Solver.SolverErrors (Map PackageName Version) +validateDependenciesSolve compiler (Manifest manifest) compilerIndex = + map snd $ Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies -- | Verifies that all dependencies in the manifest are present in the build -- | plan, and the version listed in the build plan is within the range provided @@ -97,23 +150,6 @@ getUnresolvedDependencies (Manifest { dependencies }) resolutions = | not (Range.includes dependencyRange version) -> Just $ Right $ dependencyName /\ dependencyRange /\ version | otherwise -> Nothing --- | Discovers dependencies listed in the manifest that are not actually used --- | by the solved dependencies. 
This should not produce an error, but it --- | indicates an over-constrained manifest. -getUnusedDependencies :: Manifest -> Map PackageName Version -> Set PackageName -> Maybe (NonEmptySet PackageName) -getUnusedDependencies (Manifest { dependencies }) resolutions discovered = do - let - -- There may be too many resolved dependencies because the manifest includes - -- e.g. test dependencies, so we start by only considering resolved deps - -- that are actually used. - inUse = Set.filter (flip Set.member discovered) (Map.keys resolutions) - - -- Next, we can determine which dependencies are unused by looking at the - -- difference between the manifest dependencies and the resolved packages - unused = Set.filter (not <<< flip Set.member inUse) (Map.keys dependencies) - - NonEmptySet.fromSet unused - data TarballSizeResult = ExceedsMaximum Number | WarnPackageSize Number diff --git a/lib/src/PursGraph.purs b/lib/src/PursGraph.purs index 5ed1e512b..d95bff119 100644 --- a/lib/src/PursGraph.purs +++ b/lib/src/PursGraph.purs @@ -79,7 +79,17 @@ associateModules parse graph = do -- | Find direct dependencies of the given module, according to the given graph. directDependencies :: ModuleName -> PursGraph -> Maybe (Set ModuleName) -directDependencies name = map (Set.fromFoldable <<< _.depends) <<< Map.lookup name +directDependencies start graph = Map.lookup start graph <#> \_ -> directDependenciesOf (Set.singleton start) graph + +-- | Find direct dependencies of a set of input modules according to the given +-- | graph, excluding the input modules themselves. 
+directDependenciesOf :: Set ModuleName -> PursGraph -> Set ModuleName
+directDependenciesOf sources graph = do
+  let
+    foldFn prev name = case Map.lookup name graph of
+      Nothing -> prev
+      Just { depends } -> Set.union prev (Array.foldl (\acc mod -> if Set.member mod sources then acc else Set.insert mod acc) Set.empty depends)
+  Array.foldl foldFn Set.empty $ Set.toUnfoldable sources
 
 -- | Find all dependencies of the given module, according to the given graph,
 -- | excluding the module itself.
diff --git a/lib/src/Range.purs b/lib/src/Range.purs
index 4438d2383..cae45746b 100644
--- a/lib/src/Range.purs
+++ b/lib/src/Range.purs
@@ -5,15 +5,16 @@ module Registry.Range
   ( Range
   , caret
   , codec
+  , exact
   , greaterThanOrEq
   , includes
   , intersect
   , lessThan
+  , mk
   , parse
   , parser
   , print
   , union
-  , mk
   ) where
 
 import Prelude
 
@@ -147,6 +148,11 @@ mk lhs rhs | lhs < rhs = Just (Range { lhs, rhs })
 mk _ _ = Nothing
 
 -- | Produce a "caret range" from a version.
--- | I.e. "^0.15.6" ==> ">=0.15.6 > 0.16.0"
+-- | i.e. "^0.15.6" ==> ">=0.15.6 <0.16.0"
 caret :: Version -> Range
 caret v = Range { lhs: v, rhs: Version.bumpHighest v }
+
+-- | Produce an exact range from a version.
+-- | i.e. 
"0.15.6" ==> ">=0.15.6 <0.15.7" +exact :: Version -> Range +exact v = Range { lhs: v, rhs: Version.bumpPatch v } diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index ac0086c76..fad71e937 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -5,9 +5,12 @@ import Prelude import Control.Alternative (guard) import Data.Array as Array +import Data.Array.NonEmpty (NonEmptyArray) import Data.Array.NonEmpty as NEA +import Data.Array.NonEmpty as NonEmptyArray import Data.Bifunctor (lmap) import Data.Either (Either(..)) +import Data.Either as Either import Data.Foldable (fold, foldMap, intercalate) import Data.FoldableWithIndex (anyWithIndex, foldMapWithIndex, foldlWithIndex, forWithIndex_) import Data.Functor.App (App(..)) @@ -20,6 +23,7 @@ import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) import Data.Semigroup.Foldable (intercalateMap) +import Data.Semigroup.Foldable as Foldable1 import Data.Set (Set) import Data.Set as Set import Data.Set.NonEmpty (NonEmptySet) @@ -27,6 +31,11 @@ import Data.Set.NonEmpty as NES import Data.Traversable (for, sequence, traverse) import Data.TraversableWithIndex (forWithIndex, traverseWithIndex) import Data.Tuple (Tuple(..), fst, snd) +import Partial.Unsafe as Partial +import Registry.Manifest (Manifest(..)) +import Registry.ManifestIndex (ManifestIndex) +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata (Metadata(..)) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) @@ -39,6 +48,50 @@ import Safe.Coerce (coerce) -- Public API -------------------------------------------------------------------------------- +-- | A 'DependencyIndex' enriched to include the compiler versions supported by +-- | each package version as a dependency. 
+newtype CompilerIndex = CompilerIndex DependencyIndex + +derive instance Newtype CompilerIndex _ + +-- | Associate the compiler versions supported by each package version by +-- | inserting them as a range in the version's dependencies. +buildCompilerIndex :: NonEmptyArray Version -> ManifestIndex -> Map PackageName Metadata -> CompilerIndex +buildCompilerIndex pursCompilers index metadata = CompilerIndex do + let + purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") + + getDependencies (Manifest manifest) = fromMaybe manifest.dependencies do + Metadata { published } <- Map.lookup manifest.name metadata + { compilers: eitherCompilers } <- Map.lookup manifest.version published + -- If the dependency hasn't yet had all compilers computed for it, + -- then we don't add it to the dependencies to avoid over- + -- constraining the solver. + compilers <- Either.hush eitherCompilers + -- Otherwise, we construct a maximal range for the compilers the + -- indicated package version supports. + let + min = Foldable1.minimum compilers + max = Version.bumpPatch $ Foldable1.maximum compilers + pursRange <- Range.mk min max + pure $ Map.insert purs pursRange manifest.dependencies + + newPurs version = Map.singleton purs (Map.singleton version Map.empty) + pursVersions = Array.foldl (\acc compiler -> Map.unionWith Map.union (newPurs compiler) acc) Map.empty (NonEmptyArray.toArray pursCompilers) + dependencyIndex = map (map getDependencies) (ManifestIndex.toMap index) + + Map.unionWith Map.union pursVersions dependencyIndex + +-- | Solve the given dependencies using a dependency index that includes compiler +-- | versions, such that the solution prunes results that would fall outside +-- | a compiler range accepted by all dependencies. 
+solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler pursRange (CompilerIndex index) required = do + let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") + results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } + let pursVersion = Map.lookup purs results + pure $ Tuple pursVersion $ Map.delete purs results + -- | Data from the registry index, listing dependencies for each version of -- | each package type DependencyIndex = Map PackageName (Map Version (Map PackageName Range)) @@ -146,6 +199,7 @@ intersectionFromRange' package range = -------------------------------------------------------------------------------- type SolverErrors = NEL.NonEmptyList SolverError + data SolverError = Conflicts (Map PackageName Intersection) | WhileSolving PackageName (Map Version SolverError) diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs index c37d6875a..18e0863ef 100644 --- a/lib/test/Registry/ManifestIndex.purs +++ b/lib/test/Registry/ManifestIndex.purs @@ -74,8 +74,8 @@ spec = do manifest1 = unsafeManifest "prelude" "1.0.0" [] manifest2 = Newtype.over Manifest (_ { description = Just "My prelude description." 
}) manifest1 index = - ManifestIndex.insert manifest1 ManifestIndex.empty - >>= ManifestIndex.insert manifest2 + ManifestIndex.insert ManifestIndex.ConsiderRanges manifest1 ManifestIndex.empty + >>= ManifestIndex.insert ManifestIndex.ConsiderRanges manifest2 case index of Left errors -> @@ -103,17 +103,20 @@ spec = do tinyIndex :: Array Manifest tinyIndex = [ unsafeManifest "prelude" "1.0.0" [] ] - testIndex { satisfied: tinyIndex, unsatisfied: [] } + testIndex ManifestIndex.ConsiderRanges { satisfied: tinyIndex, unsatisfied: [] } Spec.it "Fails to parse non-self-contained index" do let - satisfied :: Array Manifest - satisfied = + satisfiedStrict :: Array Manifest + satisfiedStrict = [ unsafeManifest "prelude" "1.0.0" [] , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ] - -- It is OK for the version bounds to not exist, although we may - -- choose to make this more strict in the future. - , unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ] + ] + + -- Packages with dependencies that exist, but not at the proper bounds. 
+ satisfiedLoose :: Array Manifest + satisfiedLoose = satisfiedStrict <> + [ unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ] ] unsatisfied :: Array Manifest @@ -121,7 +124,8 @@ spec = do [ unsafeManifest "control" "3.0.0" [ Tuple "tuples" ">=2.0.0 <3.0.0" ] ] - testIndex { satisfied, unsatisfied } + testIndex ManifestIndex.ConsiderRanges { satisfied: satisfiedStrict, unsatisfied } + testIndex ManifestIndex.IgnoreRanges { satisfied: satisfiedLoose, unsatisfied } Spec.it "Parses cyclical but acceptable index" do let @@ -133,7 +137,7 @@ spec = do , unsafeManifest "control" "2.0.0" [] ] - testIndex { satisfied, unsatisfied: [] } + testIndex ManifestIndex.ConsiderRanges { satisfied, unsatisfied: [] } Spec.it "Does not parse unacceptable cyclical index" do let @@ -143,7 +147,7 @@ spec = do , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ] ] - testIndex { satisfied: [], unsatisfied } + testIndex ManifestIndex.ConsiderRanges { satisfied: [], unsatisfied } contextEntry :: String contextEntry = @@ -155,9 +159,10 @@ contextEntry = testIndex :: forall m . 
MonadThrow Error m - => { satisfied :: Array Manifest, unsatisfied :: Array Manifest } + => ManifestIndex.IncludeRanges + -> { satisfied :: Array Manifest, unsatisfied :: Array Manifest } -> m Unit -testIndex { satisfied, unsatisfied } = case ManifestIndex.maximalIndex (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of +testIndex consider { satisfied, unsatisfied } = case ManifestIndex.maximalIndex consider (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of Tuple errors result -> do let { fail: shouldHaveErrors } = diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index eff61e185..08c12d887 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -25,24 +25,31 @@ recordStudio = "published": { "0.1.0": { "bytes": 3438, + "compilers": "0.13.0", "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", "publishedTime": "2021-03-27T10:03:46.000Z", "ref": "v0.1.0" }, "0.2.1": { "bytes": 3365, + "compilers": "0.13.0", "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", "publishedTime": "2022-05-15T10:51:57.000Z", "ref": "v0.2.1" }, "1.0.0": { "bytes": 5155, + "compilers": "0.13.0", "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", "publishedTime": "2022-11-03T17:30:28.000Z", "ref": "v1.0.0" }, "1.0.1": { "bytes": 5635, + "compilers": [ + "0.13.0", + "0.13.1" + ], "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=", "publishedTime": "2022-11-04T12:21:09.000Z", "ref": "v1.0.1" diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index 2e5cb47aa..338fe4266 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -63,8 +63,9 @@ spec = do now = unsafeDateTime "2022-12-12T12:00:00.000Z" outOfRange = unsafeDateTime "2022-12-10T11:00:00.000Z" inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" + compilers = Left (unsafeVersion "0.13.0") - publishedMetadata = { 
bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, ref: "" } + publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } metadata = Metadata { location: defaultLocation diff --git a/lib/test/Registry/Solver.purs b/lib/test/Registry/Solver.purs index bfc0e31b9..a45cf92f9 100644 --- a/lib/test/Registry/Solver.purs +++ b/lib/test/Registry/Solver.purs @@ -7,18 +7,19 @@ import Data.Either (Either(..)) import Data.Foldable (for_) import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList -import Data.Map (Map) +import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map -import Data.Maybe (Maybe(..)) -import Data.Newtype (wrap) +import Data.Maybe (Maybe(..), fromMaybe') +import Data.Newtype (un, wrap) import Data.Semigroup.Foldable (intercalateMap) import Data.Set as Set import Data.Set.NonEmpty as NES import Data.Tuple (Tuple(..)) import Data.Tuple.Nested ((/\)) +import Partial.Unsafe (unsafeCrashWith) import Registry.PackageName as PackageName import Registry.Range as Range -import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), printSolverError, solve) +import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), initializeRegistry, initializeRequired, lowerBound, printSolverError, solve, solveSeed, solveSteps, upperBound) import Registry.Test.Assert as Assert import Registry.Test.Utils (fromRight) import Registry.Types (PackageName, Range, Version) @@ -31,6 +32,11 @@ spec = do shouldSucceed goals result = pure unit >>= \_ -> solve solverIndex (Map.fromFoldable goals) `Assert.shouldContain` (Map.fromFoldable result) + shouldSucceedSteps goals result = pure unit >>= \_ -> do + let solved = solveSteps (solveSeed { registry: initializeRegistry solverIndex, required: initializeRequired (Map.fromFoldable goals) }) + let toRange intersect = fromMaybe' (\_ -> 
unsafeCrashWith "Bad intersection") (Range.mk (lowerBound intersect) (upperBound intersect)) + map toRange (un SemigroupMap solved.required) `Assert.shouldEqual` Map.fromFoldable result + shouldFail goals errors = pure unit >>= \_ -> case solve solverIndex (Map.fromFoldable goals) of Left solverErrors -> do let expectedErrorCount = Array.length errors @@ -103,6 +109,22 @@ spec = do , prelude.package /\ version 1 ] + Spec.describe "Single-step expands bounds" do + Spec.it "Simple range" do + shouldSucceedSteps + [ simple.package /\ range 0 1 ] + [ simple.package /\ range 0 1, prelude.package /\ range 0 1 ] + + Spec.it "Multi-version range" do + shouldSucceedSteps + [ simple.package /\ range 0 2 ] + [ simple.package /\ range 0 2, prelude.package /\ range 0 2 ] + + Spec.it "Transitive" do + shouldSucceedSteps + [ onlySimple.package /\ range 0 1 ] + [ onlySimple.package /\ range 0 1, simple.package /\ range 0 1, prelude.package /\ range 0 1 ] + Spec.describe "Valid dependency ranges containing some invalid versions solve" do Spec.it "Proceeds past broken ranges to find a later valid range" do -- 'broken-fixed' cannot be solved at the broken version 0, but it can be diff --git a/nix/test-vm.nix b/nix/test-vm.nix index 916866579..f77ef574a 100644 --- a/nix/test-vm.nix +++ b/nix/test-vm.nix @@ -19,10 +19,6 @@ services.getty.autologinUser = "root"; virtualisation = { - graphics = false; - host = { - inherit pkgs; - }; forwardPorts = [ { from = "host"; @@ -30,8 +26,14 @@ host.port = 8080; } ]; + graphics = false; + host = { + inherit pkgs; + }; + # Can be adjusted if necessary for test systems (default is 1024) + memorySize = 2048; }; - system.stateVersion = "23.11"; + system.stateVersion = "24.05"; }; } diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index a9f0079b5..fade3c9b0 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -12,26 +12,37 @@ import ArgParse.Basic (ArgParser) import ArgParse.Basic as Arg 
import Control.Apply (lift2) import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Compactable (separate) +import Data.DateTime (Date, Month(..)) +import Data.DateTime as DateTime +import Data.Enum (toEnum) import Data.Exists as Exists import Data.Filterable (partition) import Data.Foldable (foldMap) import Data.Foldable as Foldable import Data.Formatter.DateTime as Formatter.DateTime +import Data.Function (on) import Data.FunctorWithIndex (mapWithIndex) import Data.List as List +import Data.List.NonEmpty as NonEmptyList import Data.Map as Map import Data.Ordering (invert) import Data.Profunctor as Profunctor import Data.Set as Set +import Data.Set.NonEmpty (NonEmptySet) +import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits +import Data.These (These(..)) +import Data.Tuple (uncurry) import Data.Variant as Variant import Effect.Class.Console as Console +import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process import Parsing (Parser) @@ -40,39 +51,49 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic +import Registry.App.API (COMPILER_CACHE) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import 
Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry as Registry import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.LenientVersion (LenientVersion) import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestValidationError) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) +import Registry.App.Manifest.SpagoYaml as SpagoYaml import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec (packageMap, versionMap) +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.Location as Location import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex -import Registry.Operation (PublishData) import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver (CompilerIndex(..)) +import Registry.Solver as Solver import Registry.Version as Version -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Run.Except -import Spago.Generated.BuildInfo as BuildInfo import Type.Proxy (Proxy(..)) data ImportMode = DryRun | GenerateRegistry | UpdateRegistry @@ -127,7 +148,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> 
Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) GenerateRegistry -> do @@ -138,7 +159,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash (Registry.CommitAs (Git.pacchettibottiCommitter token)))) >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) UpdateRegistry -> do @@ -149,7 +170,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.ForceClean (Registry.CommitAs (Git.pacchettibottiCommitter token)))) >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret (Pursuit.handleAff token) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Recent) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) -- Logging setup @@ -161,18 +182,19 @@ main = launchAff_ do logFile = "legacy-importer-" <> String.take 19 (Formatter.DateTime.format Internal.Format.iso8601DateTime now) <> ".log" logPath = Path.concat [ logDir, logFile ] - runLegacyImport mode logPath + runLegacyImport logPath # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' -runLegacyImport :: forall r. 
ImportMode -> FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit -runLegacyImport mode logs = do +runLegacyImport :: forall r. FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit +runLegacyImport logs = do Log.info "Starting legacy import!" Log.info $ "Logs available at " <> logs @@ -204,108 +226,300 @@ runLegacyImport mode logs = do pure $ fixupNames allPackages Log.info $ "Read " <> show (Set.size (Map.keys legacyRegistry)) <> " package names from the legacy registry." + + Log.info "Reading reserved 0.13 packages..." + reserved0_13 <- readPackagesMetadata >>= case _ of + Left err -> do + Log.warn $ "Could not read reserved packages: " <> err + Log.warn $ "Determining reserved packages..." + metadata <- getPackagesMetadata legacyRegistry + let cutoff = filterPackages_0_13 metadata + writePackagesMetadata cutoff + pure cutoff + Right cutoff -> pure cutoff + + Log.info $ "Reserving metadata files for 0.13 and purs/metadata packages" + forWithIndex_ reserved0_13 \package { address } -> Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for reserved 0.13 package " <> PackageName.print package + let location = GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> Log.debug $ PackageName.print package <> " already reserved." 
+ + let metadataPackage = unsafeFromRight (PackageName.parse "metadata") + let pursPackage = unsafeFromRight (PackageName.parse "purs") + for_ [ metadataPackage, pursPackage ] \package -> + Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for " <> PackageName.print package + let location = GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print package, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> pure unit + importedIndex <- importLegacyRegistry legacyRegistry Log.info "Writing package and version failures to disk..." Run.liftAff $ writePackageFailures importedIndex.failedPackages Run.liftAff $ writeVersionFailures importedIndex.failedVersions - Log.info "Writing empty metadata files for legacy packages that can't be registered..." - void $ forWithIndex importedIndex.reservedPackages \package location -> do - Registry.readMetadata package >>= case _ of - Nothing -> do - let metadata = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } - Registry.writeMetadata package metadata - Just _ -> pure unit - Log.info "Ready for upload!" - - Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex + let importStats = calculateImportStats legacyRegistry importedIndex + let formattedStats = formatImportStats importStats + Log.info formattedStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "import-stats.txt" ]) formattedStats Log.info "Sorting packages for upload..." 
- let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges importedIndex.registryIndex - - Log.info "Removing packages that previously failed publish" - indexPackages <- allIndexPackages # Array.filterA \(Manifest { name, version }) -> - isNothing <$> Cache.get _importCache (PublishFailure name version) + let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "sorted-packages.txt" ]) $ String.joinWith "\n" $ map (\(Manifest { name, version }) -> PackageName.print name <> "@" <> Version.print version) allIndexPackages - allMetadata <- Registry.readAllMetadata - - -- This record comes from the build directory (.spago) and records information - -- from the most recent build. - let compiler = unsafeFromRight (Version.parse BuildInfo.pursVersion) - - -- Just a safety check to ensure the compiler used in the pipeline is not too - -- low. Should be bumped from time to time to the latest compiler. - let minCompiler = unsafeFromRight (Version.parse "0.15.7") - when (compiler < minCompiler) do - Run.Except.throw $ "Local compiler " <> Version.print compiler <> " is too low (min: " <> Version.print minCompiler <> ")." 
- - Log.info $ "Using compiler " <> Version.print compiler + Log.info "Removing packages that previously failed publish or have been published" + publishable <- do + allMetadata <- Registry.readAllMetadata + allIndexPackages # Array.filterA \(Manifest { name, version }) -> do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure $ not $ hasMetadata allMetadata name version + Just _ -> pure false + + allCompilers <- PursVersions.pursVersions + allCompilersRange <- case Range.mk (NonEmptyArray.head allCompilers) (Version.bumpPatch (NonEmptyArray.last allCompilers)) of + Nothing -> Run.Except.throw $ "Failed to construct a compiler range from " <> Version.print (NonEmptyArray.head allCompilers) <> " and " <> Version.print (NonEmptyArray.last allCompilers) + Just range -> do + Log.info $ "All available compilers range: " <> Range.print range + pure range let - isPublished { name, version } = hasMetadata allMetadata name version - notPublished = indexPackages # Array.filter \(Manifest manifest) -> not (isPublished manifest) - - mkOperation :: Manifest -> Run _ PublishData - mkOperation (Manifest manifest) = - case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of - Nothing -> do - let formatted = formatPackageVersion manifest.name manifest.version - Log.error $ "Unable to recover package ref for " <> formatted - Run.Except.throw $ "Failed to create publish operation for " <> formatted - Just ref -> - pure - { location: Just manifest.location - , name: manifest.name - , ref: un RawVersion ref - , compiler - , resolutions: Nothing - } - - case notPublished of + publishLegacyPackage :: Solver.TransitivizedRegistry -> Manifest -> Run _ Unit + publishLegacyPackage legacyIndex (Manifest manifest) = do + let formatted = formatPackageVersion manifest.name manifest.version + Log.info $ "\n----------\nPUBLISHING: " <> formatted <> "\n----------\n" + RawVersion ref <- case Map.lookup manifest.version =<< 
Map.lookup manifest.name importedIndex.packageRefs of + Nothing -> Run.Except.throw $ "Unable to recover package ref for " <> formatted + Just ref -> pure ref + + Log.debug "Building dependency index with compiler versions..." + compilerIndex <- API.readCompilerIndex + + Log.debug $ "Solving dependencies for " <> formatted + eitherResolutions <- do + let toErrors = map Solver.printSolverError <<< NonEmptyList.toUnfoldable + let isCompilerSolveError = String.contains (String.Pattern "Conflict in version ranges for purs:") + let partitionIsCompiler = partitionEithers <<< map (\error -> if isCompilerSolveError error then Right error else Left error) + + legacySolution <- case Solver.solveFull { registry: legacyIndex, required: Solver.initializeRequired manifest.dependencies } of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with legacy index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right resolutions -> do + Log.debug $ "Solved " <> formatted <> " with legacy index." + -- The solutions do us no good if the dependencies don't exist. Note + -- the compiler index is updated on every publish. + let lookupInRegistry res = maybe (Left res) (\_ -> Right res) (Map.lookup (fst res) (un CompilerIndex compilerIndex) >>= Map.lookup (snd res)) + let { fail: notRegistered } = partitionEithers $ map lookupInRegistry $ Map.toUnfoldable resolutions + if (Array.null notRegistered) then + pure $ Right resolutions + else do + let missing = "Some resolutions from legacy index are not registered: " <> String.joinWith ", " (map (uncurry formatPackageVersion) notRegistered) + Log.warn missing + Log.warn "Not using legacy index resolutions for this package." 
+ pure $ Left $ SolveFailedDependencies missing + + currentSolution <- case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with current index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right (Tuple _ resolutions) -> do + Log.debug $ "Solved " <> formatted <> " with contemporary index." + pure $ Right resolutions + + pure $ case legacySolution, currentSolution of + Left err, Left _ -> Left err + Right resolutions, Left _ -> Right $ This resolutions + Left _, Right resolutions -> Right $ That resolutions + Right legacyResolutions, Right currentResolutions -> Right $ Both legacyResolutions currentResolutions + + case eitherResolutions of + -- We skip if we couldn't solve (but we write the error to cache). + Left err -> + Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right resolutionOptions -> do + Log.info "Selecting usable compiler from resolutions..." + + let + findFirstFromResolutions :: Map PackageName Version -> Run _ (Either (Map Version CompilerFailure) Version) + findFirstFromResolutions resolutions = do + Log.debug $ "Finding compiler for " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + possibleCompilers <- + if Map.isEmpty manifest.dependencies then do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." + pure $ NonEmptySet.fromFoldable1 allCompilers + else do + Log.debug "No compiler version was produced by the solver, so all compilers are potentially compatible." 
+ allMetadata <- Registry.readAllMetadata + case compatibleCompilers allMetadata resolutions of + Left [] -> do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." + pure $ NonEmptySet.fromFoldable1 allCompilers + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Log.warn $ Array.fold + [ "Resolutions admit no overlapping compiler versions:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + pure $ NonEmptySet.fromFoldable1 allCompilers + Right compilers -> do + Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CJ.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + + cached <- do + cached <- for (NonEmptySet.toUnfoldable possibleCompilers) \compiler -> + Cache.get API._compilerCache (API.Compilation (Manifest manifest) resolutions compiler) >>= case _ of + Nothing -> pure Nothing + Just { result: Left _ } -> pure Nothing + Just { target, result: Right _ } -> pure $ Just target + pure $ NonEmptyArray.fromArray $ Array.catMaybes cached + + case cached of + Just prev -> do + let selected = NonEmptyArray.last prev + Log.debug $ "Found successful cached compilation for " <> formatted <> " and chose " <> Version.print selected + pure $ Right selected + Nothing -> do + Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find first working one." + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + { path } <- Source.fetch tmp manifest.location ref + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." 
+ let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + API.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Trying compilers one-by-one..." + selected <- findFirstCompiler + { source: path + , installed: installDir + , compilers: NonEmptySet.toUnfoldable possibleCompilers + , resolutions + , manifest: Manifest manifest + } + FS.Extra.remove tmp + pure selected + + let + collectCompilerErrors :: Map Version CompilerFailure -> Map (NonEmptyArray Version) CompilerFailure + collectCompilerErrors failures = do + let + foldFn prev xs = do + let Tuple _ failure = NonEmptyArray.head xs + let key = map fst xs + Map.insert key failure prev + Array.foldl foldFn Map.empty $ Array.groupAllBy (compare `on` snd) (Map.toUnfoldable failures) + + reportFailures :: forall a. _ -> Run _ (Either PublishError a) + reportFailures failures = do + let collected = collectCompilerErrors failures + Log.error $ "Failed to find any valid compilers for publishing:\n" <> printJson compilerFailureMapCodec collected + pure $ Left $ NoCompilersFound collected + + -- Here, we finally attempt to find a suitable compiler. If we only + -- got one set of working resolutions that's what we use. If we got + -- solutions with both the legacy and adjusted-manifest indices, then + -- we try the adjusted index first since that's what is used in the + -- publish pipeline. 
+ eitherCompiler <- case resolutionOptions of + This legacyResolutions -> do + selected <- findFirstFromResolutions legacyResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + That currentResolutions -> do + selected <- findFirstFromResolutions currentResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Both legacyResolutions currentResolutions -> do + selectedCurrent <- findFirstFromResolutions currentResolutions + case selectedCurrent of + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Left currentFailures | legacyResolutions == currentResolutions -> reportFailures currentFailures + Left _ -> do + Log.info $ "Could not find suitable compiler from current index, trying legacy solution..." + selectedLegacy <- findFirstFromResolutions legacyResolutions + case selectedLegacy of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + + case eitherCompiler of + Left err -> Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right (Tuple compiler resolutions) -> do + Log.debug $ "Selected " <> Version.print compiler <> " for publishing." + let + payload = + { name: manifest.name + , location: Just manifest.location + , ref + , compiler + , resolutions: Just resolutions + } + Run.Except.runExcept (API.publish (Just legacyIndex) payload) >>= case _ of + Left error -> do + Log.error $ "Failed to publish " <> formatted <> ": " <> error + Cache.put _importCache (PublishFailure manifest.name manifest.version) (PublishError error) + Right _ -> do + Log.info $ "Published " <> formatted + + case publishable of [] -> Log.info "No packages to publish." 
manifests -> do - let printPackage (Manifest { name, version }) = formatPackageVersion name version Log.info $ Array.foldMap (append "\n") [ "----------" , "AVAILABLE TO PUBLISH" - , "" - , " using purs " <> Version.print compiler - , "" + , Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) manifests , "----------" - , Array.foldMap (append "\n - " <<< printPackage) manifests ] - let - source = case mode of - DryRun -> LegacyPackage - GenerateRegistry -> LegacyPackage - UpdateRegistry -> CurrentPackage - - void $ for notPublished \(Manifest manifest) -> do - let formatted = formatPackageVersion manifest.name manifest.version - Log.info $ Array.foldMap (append "\n") - [ "----------" - , "PUBLISHING: " <> formatted - , stringifyJson Location.codec manifest.location - , "----------" - ] - operation <- mkOperation (Manifest manifest) - - result <- Run.Except.runExcept $ API.publish source operation - -- TODO: Some packages will fail because the legacy importer does not - -- perform all the same validation checks that the publishing flow does. - -- What should we do when a package has a valid manifest but fails for - -- other reasons? Should they be added to the package validation - -- failures and we defer writing the package failures until the import - -- has completed? - case result of - Left error -> do - Log.error $ "Failed to publish " <> formatted <> ": " <> error - Cache.put _importCache (PublishFailure manifest.name manifest.version) error - Right _ -> do - Log.info $ "Published " <> formatted + legacyIndex <- do + Log.info "Transitivizing legacy registry..." + pure + $ Solver.exploreAllTransitiveDependencies + $ Solver.initializeRegistry + $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) + + void $ for manifests (publishLegacyPackage legacyIndex) + + Log.info "Finished publishing! Collecting all publish failures and writing to disk." 
+ let + collectError prev (Manifest { name, version }) = do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure prev + Just error -> pure $ Map.insertWith Map.union name (Map.singleton version error) prev + failures <- Array.foldM collectError Map.empty allIndexPackages + Run.liftAff $ writePublishFailures failures + + let publishStats = collectPublishFailureStats importStats (map _.address reserved0_13) importedIndex.registryIndex failures + let publishStatsMessage = formatPublishFailureStats publishStats + Log.info publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "reserved-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable publishStats.packages.reserved))) + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "removed-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable (Set.difference publishStats.packages.failed publishStats.packages.reserved)))) + +-- | Record all package failures to the 'package-failures.json' file. +writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit +writePublishFailures = + writeJsonFile (packageMap (versionMap jsonValidationErrorCodec)) (Path.concat [ scratchDir, "publish-failures.json" ]) + <<< map (map formatPublishError) -- | Record all package failures to the 'package-failures.json' file. 
writePackageFailures :: Map RawPackageName PackageValidationError -> Aff Unit @@ -324,7 +538,7 @@ type LegacyRegistry = Map RawPackageName String type ImportedIndex = { failedPackages :: Map RawPackageName PackageValidationError , failedVersions :: Map RawPackageName (Map RawVersion VersionValidationError) - , reservedPackages :: Map PackageName Location + , removedPackages :: Map PackageName Location , registryIndex :: ManifestIndex , packageRefs :: Map PackageName (Map Version RawVersion) } @@ -362,16 +576,15 @@ importLegacyRegistry legacyRegistry = do -- A 'checked' index is one where we have verified that all dependencies -- are self-contained within the registry. - Tuple unsatisfied validIndex = ManifestIndex.maximalIndex validLegacyManifests + Tuple unsatisfied validIndex = ManifestIndex.maximalIndex ManifestIndex.ConsiderRanges validLegacyManifests -- The list of all packages that were present in the legacy registry files, - -- but which have no versions present in the fully-imported registry. These - -- packages still need to have empty metadata files written for them. - reservedPackages :: Map PackageName Location - reservedPackages = - Map.fromFoldable $ Array.mapMaybe reserved $ Map.toUnfoldable legacyRegistry + -- but which have no versions present in the fully-imported registry. 
+ removedPackages :: Map PackageName Location + removedPackages = + Map.fromFoldable $ Array.mapMaybe removed $ Map.toUnfoldable legacyRegistry where - reserved (Tuple (RawPackageName name) address) = do + removed (Tuple (RawPackageName name) address) = do packageName <- hush $ PackageName.parse name guard $ isNothing $ Map.lookup packageName $ ManifestIndex.toMap validIndex { owner, repo } <- hush $ Parsing.runParser address legacyRepoParser @@ -403,7 +616,7 @@ importLegacyRegistry legacyRegistry = do pure { failedPackages: packageFailures , failedVersions: versionFailures - , reservedPackages: reservedPackages + , removedPackages: removedPackages , registryIndex: validIndex , packageRefs } @@ -428,17 +641,19 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa buildManifestForVersion :: Tag -> Run _ (Either VersionValidationError Manifest) buildManifestForVersion tag = Run.Except.runExceptAt _exceptVersion do version <- exceptVersion $ validateVersion tag - - -- TODO: This will use the manifest for the package version from the - -- registry, without trying to produce a legacy manifest. However, we may - -- want to always attempt to produce a legacy manifest. If we can produce - -- one we compare it to the existing entry, failing if there is a - -- difference; if we can't, we warn and fall back to the existing entry. - Registry.readManifest package.name (LenientVersion.version version) >>= case _ of + Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + Just cached -> exceptVersion cached Nothing -> do - Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + -- While technically not 'legacy', we do need to handle packages with + -- spago.yaml files because they've begun to pop up since the registry + -- alpha began and we don't want to drop them when doing a re-import. 
+ fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of + Just manifest -> do + Log.debug $ "Built manifest from discovered spago.yaml file." + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) + pure manifest Nothing -> do - Log.debug $ "Building manifest in legacy import because it was not found in cache: " <> formatPackageVersion package.name (LenientVersion.version version) + Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) manifest <- Run.Except.runExceptAt _exceptVersion do exceptVersion $ validateVersionDisabled package.name version legacyManifest <- do @@ -446,13 +661,11 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } Right result -> pure result pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest exceptVersion manifest - Just cached -> - exceptVersion cached - - Just manifest -> - exceptVersion $ Right manifest manifests <- for package.tags \tag -> do manifest <- buildManifestForVersion tag @@ -460,6 +673,145 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa pure $ Map.fromFoldable manifests +data PublishError + = SolveFailedDependencies String + | SolveFailedCompiler String + | NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure) + 
| UnsolvableDependencyCompilers (Array GroupedByCompilers) + | PublishError String + +derive instance Eq PublishError + +publishErrorCodec :: CJ.Codec PublishError +publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CJ.Variant.variantMatch + { solveFailedCompiler: Right CJ.string + , solveFailedDependencies: Right CJ.string + , noCompilersFound: Right compilerFailureMapCodec + , unsolvableDependencyCompilers: Right (CJ.array groupedByCompilersCodec) + , publishError: Right CJ.string + } + where + toVariant = case _ of + SolveFailedDependencies error -> Variant.inj (Proxy :: _ "solveFailedDependencies") error + SolveFailedCompiler error -> Variant.inj (Proxy :: _ "solveFailedCompiler") error + NoCompilersFound failed -> Variant.inj (Proxy :: _ "noCompilersFound") failed + UnsolvableDependencyCompilers group -> Variant.inj (Proxy :: _ "unsolvableDependencyCompilers") group + PublishError error -> Variant.inj (Proxy :: _ "publishError") error + + fromVariant = Variant.match + { solveFailedDependencies: SolveFailedDependencies + , solveFailedCompiler: SolveFailedCompiler + , noCompilersFound: NoCompilersFound + , unsolvableDependencyCompilers: UnsolvableDependencyCompilers + , publishError: PublishError + } + +type PublishFailureStats = + { packages :: { total :: Int, considered :: Int, partial :: Int, failed :: Set PackageName, reserved :: Set PackageName } + , versions :: { total :: Int, considered :: Int, failed :: Int, reason :: Map String Int } + } + +collectPublishFailureStats :: ImportStats -> Map PackageName Address -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats +collectPublishFailureStats importStats reserved0_13 importedIndex failures = do + let + index :: Map PackageName (Map Version Manifest) + index = ManifestIndex.toMap importedIndex + + countVersions :: forall a. 
Map PackageName (Map Version a) -> Int + countVersions = Array.foldl (\prev (Tuple _ versions) -> prev + Map.size versions) 0 <<< Map.toUnfoldable + + startPackages :: Int + startPackages = importStats.packagesProcessed + + consideredPackages :: Int + consideredPackages = Map.size index + + startVersions :: Int + startVersions = importStats.versionsProcessed + + consideredVersions :: Int + consideredVersions = countVersions index + + failedPackages :: Int + failedPackages = Map.size failures + + failedVersions :: Int + failedVersions = countVersions failures + + removedPackages :: Set PackageName + removedPackages = do + let + foldFn package prev versions = fromMaybe prev do + allVersions <- Map.lookup package index + guard (Map.keys allVersions == Map.keys versions) + pure $ Set.insert package prev + + foldlWithIndex foldFn Set.empty failures + + -- Packages that are eligible for removal — but are reserved due to 0.13 or + -- organization status — are the 'reserved packages'. + reservedPackages :: Set PackageName + reservedPackages = Set.intersection removedPackages (Map.keys reserved0_13) + + countByFailure :: Map String Int + countByFailure = do + let + toKey = case _ of + SolveFailedDependencies _ -> "Solving failed (dependencies)" + SolveFailedCompiler _ -> "Solving failed (compiler)" + NoCompilersFound _ -> "No compilers usable for publishing" + UnsolvableDependencyCompilers _ -> "Dependency compiler conflict" + PublishError _ -> "Publishing failed" + + foldFn prev (Tuple _ versions) = + Array.foldl (\prevCounts (Tuple _ error) -> Map.insertWith (+) (toKey error) 1 prevCounts) prev (Map.toUnfoldable versions) + + Array.foldl foldFn Map.empty (Map.toUnfoldable failures) + + { packages: + { total: startPackages + , considered: consideredPackages + , partial: failedPackages + , reserved: reservedPackages + , failed: removedPackages + } + , versions: + { total: startVersions + , considered: consideredVersions + , failed: failedVersions + , reason: countByFailure 
+ } + } + +formatPublishFailureStats :: PublishFailureStats -> String +formatPublishFailureStats { packages, versions } = String.joinWith "\n" + [ "--------------------" + , "PUBLISH FAILURES" + , "--------------------" + , "" + , show packages.considered <> " of " <> show packages.total <> " total packages were considered for publishing (others had no manifests imported.)" + , " - " <> show (packages.total - packages.partial - (Set.size packages.failed)) <> " out of " <> show packages.considered <> " packages fully succeeded." + , " - " <> show packages.partial <> " packages partially succeeded." + , " - " <> show (Set.size packages.reserved) <> " packages fully failed, but are reserved due to 0.13 or organization status." + , " - " <> show (Set.size packages.failed - Set.size packages.reserved) <> " packages had all versions fail and will be removed." + , "" + , show versions.considered <> " of " <> show versions.total <> " total versions were considered for publishing.\n - " <> show versions.failed <> " out of " <> show versions.total <> " versions failed." 
+ , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) + ] + +compilerFailureMapCodec :: CJ.Codec (Map (NonEmptyArray Version) CompilerFailure) +compilerFailureMapCodec = do + let + print = NonEmptyArray.intercalate "," <<< map Version.print + parse input = do + let versions = String.split (String.Pattern ",") input + let { fail, success } = partitionEithers $ map Version.parse versions + case NonEmptyArray.fromArray success of + Nothing | Array.null fail -> Left "No versions" + Nothing -> Left $ "No versions parsed, some failed: " <> String.joinWith ", " fail + Just result -> pure result + Internal.Codec.strMap "CompilerFailureMap" parse print compilerFailureCodec + type EXCEPT_VERSION :: Row (Type -> Type) -> Row (Type -> Type) type EXCEPT_VERSION r = (exceptVersion :: Run.Except.Except VersionValidationError | r) @@ -569,6 +921,56 @@ type PackageResult = , tags :: Array Tag } +type PackagesMetadata = { address :: Address, lastPublished :: Date } + +packagesMetadataCodec :: CJ.Codec PackagesMetadata +packagesMetadataCodec = CJ.named "PackagesMetadata" $ CJ.Record.object + { address: CJ.named "Address" $ CJ.Record.object { owner: CJ.string, repo: CJ.string } + , lastPublished: Internal.Codec.iso8601Date + } + +getPackagesMetadata :: forall r. 
Map RawPackageName String -> Run (EXCEPT String + GITHUB + r) (Map PackageName PackagesMetadata) +getPackagesMetadata legacyRegistry = do + associated <- for (Map.toUnfoldableUnordered legacyRegistry) \(Tuple rawName rawUrl) -> do + Run.Except.runExceptAt (Proxy :: _ "exceptPackage") (validatePackage rawName rawUrl) >>= case _ of + Left _ -> pure Nothing + Right { name, address, tags } -> case Array.head tags of + Nothing -> pure Nothing + Just tag -> do + result <- GitHub.getCommitDate address tag.sha + case result of + Left error -> unsafeCrashWith ("Failed to get commit date for " <> PackageName.print name <> "@" <> tag.name <> ": " <> Octokit.printGitHubError error) + Right date -> pure $ Just $ Tuple name { address, lastPublished: DateTime.date date } + pure $ Map.fromFoldable $ Array.catMaybes associated + +filterPackages_0_13 :: Map PackageName PackagesMetadata -> Map PackageName PackagesMetadata +filterPackages_0_13 = do + let + -- 0.13 release date + cutoff = DateTime.canonicalDate (unsafeFromJust (toEnum 2019)) May (unsafeFromJust (toEnum 29)) + organizations = + [ "purescript" + , "purescript-contrib" + , "purescript-node" + , "purescript-web" + , "rowtype-yoga" + , "purescript-halogen" + , "purescript-deprecated" + ] + + Map.filterWithKey \_ metadata -> do + let { owner } = metadata.address + owner `Array.elem` organizations || metadata.lastPublished >= cutoff + +writePackagesMetadata :: forall r. Map PackageName PackagesMetadata -> Run (LOG + AFF + r) Unit +writePackagesMetadata pkgs = do + let path = Path.concat [ scratchDir, "packages-metadata.json" ] + Log.info $ "Writing packages metadata to " <> path + Run.liftAff $ writeJsonFile (packageMap packagesMetadataCodec) path pkgs + +readPackagesMetadata :: forall r. Run (AFF + r) (Either String (Map PackageName PackagesMetadata)) +readPackagesMetadata = Run.liftAff $ readJsonFile (packageMap packagesMetadataCodec) (Path.concat [ scratchDir, "packages-metadata.json" ]) + validatePackage :: forall r. 
RawPackageName -> String -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult validatePackage rawPackage rawUrl = do name <- exceptPackage $ validatePackageName rawPackage @@ -641,6 +1043,7 @@ validatePackageDisabled package = disabledPackages :: Map String String disabledPackages = Map.fromFoldable [ Tuple "metadata" reservedPackage + , Tuple "purs" reservedPackage , Tuple "bitstrings" noSrcDirectory , Tuple "purveyor" noSrcDirectory , Tuple "styled-components" noSrcDirectory @@ -661,14 +1064,14 @@ validatePackageName (RawPackageName name) = type JsonValidationError = { tag :: String - , value :: Maybe String + , value :: Maybe JSON , reason :: String } jsonValidationErrorCodec :: CJ.Codec JsonValidationError jsonValidationErrorCodec = CJ.named "JsonValidationError" $ CJ.Record.object { tag: CJ.string - , value: CJ.Record.optional CJ.string + , value: CJ.Record.optional CJ.json , reason: CJ.string } @@ -677,31 +1080,43 @@ formatPackageValidationError { error, reason } = case error of InvalidPackageName -> { tag: "InvalidPackageName", value: Nothing, reason } InvalidPackageURL url -> - { tag: "InvalidPackageURL", value: Just url, reason } + { tag: "InvalidPackageURL", value: Just (CJ.encode CJ.string url), reason } PackageURLRedirects { registered } -> - { tag: "PackageURLRedirects", value: Just (registered.owner <> "/" <> registered.repo), reason } + { tag: "PackageURLRedirects", value: Just (CJ.encode CJ.string (registered.owner <> "/" <> registered.repo)), reason } CannotAccessRepo address -> - { tag: "CannotAccessRepo", value: Just (address.owner <> "/" <> address.repo), reason } + { tag: "CannotAccessRepo", value: Just (CJ.encode CJ.string (address.owner <> "/" <> address.repo)), reason } DisabledPackage -> { tag: "DisabledPackage", value: Nothing, reason } formatVersionValidationError :: VersionValidationError -> JsonValidationError formatVersionValidationError { error, reason } = case error of InvalidTag tag -> - { tag: "InvalidTag", value: 
Just tag.name, reason } + { tag: "InvalidTag", value: Just (CJ.encode CJ.string tag.name), reason } DisabledVersion -> { tag: "DisabledVersion", value: Nothing, reason } InvalidManifest err -> do let errorValue = Legacy.Manifest.printLegacyManifestError err.error - { tag: "InvalidManifest", value: Just errorValue, reason } - UnregisteredDependencies names -> do - let errorValue = String.joinWith ", " $ map PackageName.print names - { tag: "UnregisteredDependencies", value: Just errorValue, reason } + { tag: "InvalidManifest", value: Just (CJ.encode CJ.string errorValue), reason } + UnregisteredDependencies names -> + { tag: "UnregisteredDependencies", value: Just (CJ.encode (CJ.array PackageName.codec) names), reason } + +formatPublishError :: PublishError -> JsonValidationError +formatPublishError = case _ of + SolveFailedCompiler error -> + { tag: "SolveFailedCompiler", value: Nothing, reason: error } + SolveFailedDependencies error -> + { tag: "SolveFailedDependencies", value: Nothing, reason: error } + NoCompilersFound versions -> + { tag: "NoCompilersFound", value: Just (CJ.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." 
} + UnsolvableDependencyCompilers failed -> + { tag: "UnsolvableDependencyCompilers", value: Just (CJ.encode (CJ.array groupedByCompilersCodec) failed), reason: "Resolved dependencies cannot compile together" } + PublishError error -> + { tag: "PublishError", value: Nothing, reason: error } type ImportStats = { packagesProcessed :: Int , versionsProcessed :: Int - , packageNamesReserved :: Int + , packageNamesRemoved :: Int , packageResults :: { success :: Int, partial :: Int, fail :: Int } , versionResults :: { success :: Int, fail :: Int } , packageErrors :: Map String Int @@ -714,7 +1129,7 @@ formatImportStats stats = String.joinWith "\n" , show stats.packagesProcessed <> " packages processed:" , indent $ show stats.packageResults.success <> " fully successful" , indent $ show stats.packageResults.partial <> " partially successful" - , indent $ show (stats.packageNamesReserved - stats.packageResults.fail) <> " reserved (no usable versions)" + , indent $ show (stats.packageNamesRemoved - stats.packageResults.fail) <> " omitted (no usable versions)" , indent $ show stats.packageResults.fail <> " fully failed" , indent "---" , formatErrors stats.packageErrors @@ -747,8 +1162,8 @@ calculateImportStats legacyRegistry imported = do packagesProcessed = Map.size legacyRegistry - packageNamesReserved = - Map.size imported.reservedPackages + packageNamesRemoved = + Map.size imported.removedPackages packageResults = do let succeeded = Map.keys registryIndex @@ -801,7 +1216,7 @@ calculateImportStats legacyRegistry imported = do { packagesProcessed , versionsProcessed - , packageNamesReserved + , packageNamesRemoved , packageResults , versionResults , packageErrors @@ -830,12 +1245,131 @@ legacyRepoParser = do pure { owner, repo } +fetchSpagoYaml :: forall r. 
Address -> RawVersion -> Run (GITHUB + LOG + EXCEPT String + r) (Maybe Manifest) +fetchSpagoYaml address ref = do + eitherSpagoYaml <- GitHub.getContent address ref "spago.yaml" + case eitherSpagoYaml of + Left err -> do + Log.debug $ "No spago.yaml found: " <> Octokit.printGitHubError err + pure Nothing + Right contents -> do + Log.debug $ "Found spago.yaml file\n" <> contents + case parseYaml SpagoYaml.spagoYamlCodec contents of + Left error -> do + Log.warn $ "Failed to parse spago.yaml file:\n" <> contents <> "\nwith errors:\n" <> error + pure Nothing + Right { package: Just { publish: Just { location: Just location } } } + | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do + Log.warn "spago.yaml file does not use the same location it was fetched from, this is disallowed..." + pure Nothing + Right config -> case SpagoYaml.spagoYamlToManifest config of + Left err -> do + Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> contents <> "\nwith errors:\n" <> err + pure Nothing + Right manifest -> do + Log.debug "Successfully converted a spago.yaml into a purs.json manifest" + pure $ Just manifest + +-- | Find the first compiler that can compile the package source code and +-- | installed resolutions from the given array of compilers. Begins with the +-- | latest compiler and works backwards to older compilers. +findFirstCompiler + :: forall r + . 
{ compilers :: Array Version + , manifest :: Manifest + , resolutions :: Map PackageName Version + , source :: FilePath + , installed :: FilePath + } + -> Run (COMPILER_CACHE + STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) +findFirstCompiler { source, manifest, resolutions, compilers, installed } = do + search <- Run.Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do + result <- Cache.get API._compilerCache (API.Compilation manifest resolutions target) >>= case _ of + Nothing -> do + Log.info $ "Not cached, trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + let cache = { result: map (const unit) result, target } + Cache.put API._compilerCache (API.Compilation manifest resolutions target) cache + pure cache.result + Just cached -> + pure cached.result + + case result of + Left error -> pure $ Tuple target error + Right _ -> Run.Except.throw target + + case search of + Left worked -> pure $ Right worked + Right others -> pure $ Left $ Map.fromFoldable others + +type GroupedByCompilers = + { packages :: Map PackageName Version + , compilers :: NonEmptySet Version + } + +groupedByCompilersCodec :: CJ.Codec GroupedByCompilers +groupedByCompilersCodec = CJ.named "GroupedByCompilers" $ CJ.Record.object + { compilers: CJ.Common.nonEmptySet Version.codec + , packages: Internal.Codec.packageMap Version.codec + } + +-- | Given a set of package versions, determine the set of compilers that can be +-- | used for all packages. 
+compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Either (Array GroupedByCompilers) (NonEmptySet Version) +compatibleCompilers allMetadata resolutions = do + let + associated :: Array { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } + associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do + Metadata metadata <- Map.lookup name allMetadata + published <- Map.lookup version metadata.published + case published.compilers of + Left _ -> Nothing + Right compilers -> Just { name, version, compilers: compilers } + + case Array.uncons associated of + Nothing -> + Left [] + Just { head, tail: [] } -> + Right $ NonEmptySet.fromFoldable1 head.compilers + Just { head, tail } -> do + let foldFn prev = Set.intersection prev <<< Set.fromFoldable <<< _.compilers + case NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head.compilers) tail of + -- An empty intersection means there are no shared compilers among the + -- resolved dependencies. + Nothing -> do + let + grouped :: Array (NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version }) + grouped = Array.groupAllBy (compare `on` _.compilers) (Array.cons head tail) + + collect :: NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } -> GroupedByCompilers + collect vals = + { packages: Map.fromFoldable (map (\{ name, version } -> Tuple name version) vals) + -- We've already grouped by compilers, so those must all be equal + -- and we can take just the first value. + , compilers: NonEmptySet.fromFoldable1 (NonEmptyArray.head vals).compilers + } + Left $ Array.foldl (\prev -> Array.snoc prev <<< collect) [] grouped + + Just set -> + Right set + +type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) + +_importCache :: Proxy "importCache" +_importCache = Proxy + -- | A key type for the storage cache. 
Only supports packages identified by -- | their name and version. data ImportCache :: (Type -> Type -> Type) -> Type -> Type data ImportCache c a = ImportManifest PackageName RawVersion (c (Either VersionValidationError Manifest) a) - | PublishFailure PackageName Version (c String a) + | PublishFailure PackageName Version (c PublishError a) instance Functor2 c => Functor (ImportCache c) where map k (ImportManifest name version a) = ImportManifest name version (map2 k a) @@ -846,7 +1380,7 @@ instance MemoryEncodable ImportCache where ImportManifest name (RawVersion version) next -> Exists.mkExists $ Key ("ImportManifest__" <> PackageName.print name <> "__" <> version) next PublishFailure name version next -> do - Exists.mkExists $ Key ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) next + Exists.mkExists $ Key ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) next instance FsEncodable ImportCache where encodeFs = case _ of @@ -854,10 +1388,5 @@ instance FsEncodable ImportCache where let codec = CJ.Common.either versionValidationErrorCodec Manifest.codec Exists.mkExists $ AsJson ("ImportManifest__" <> PackageName.print name <> "__" <> version) codec next PublishFailure name version next -> do - let codec = CJ.string - Exists.mkExists $ AsJson ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) codec next - -type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) - -_importCache :: Proxy "importCache" -_importCache = Proxy + let codec = publishErrorCodec + Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 0bcacc643..f0cb1c63f 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -16,6 +16,7 @@ import Effect.Class.Console (log) import Effect.Class.Console as Console import Node.Path as 
Path import Node.Process as Process +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache @@ -152,10 +153,11 @@ main = launchAff_ do interpret = Registry.interpret (Registry.handle registryEnv) >>> Storage.interpret (if arguments.upload then Storage.handleS3 { s3, cache } else Storage.handleReadOnly cache) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) + >>> Cache.interpret _compilerCache (Cache.handleFs cache) >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv @@ -237,7 +239,7 @@ deleteVersion arguments name version = do Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" Just (Right specificPackageMetadata) -> do -- Obtains `newMetadata` via cache - API.publish LegacyPackage + API.publish Nothing { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index ffd66dbd2..8fa9a7070 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -28,6 +28,7 @@ import Node.Path as Path import Node.Process as Node.Process import Node.Process as Process import Parsing as Parsing +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache @@ -127,7 +128,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> 
Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) let @@ -148,6 +149,7 @@ main = launchAff_ do # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv diff --git a/types/v1/Metadata.dhall b/types/v1/Metadata.dhall index 84685290c..2f50decf6 100644 --- a/types/v1/Metadata.dhall +++ b/types/v1/Metadata.dhall @@ -14,6 +14,7 @@ let PublishedMetadata = { hash : Sha256 , bytes : Natural , publishedTime : ISO8601String + , compilers : < Single : Version | Many : List Version > } let UnpublishedMetadata =