diff --git a/.envrc b/.envrc new file mode 100644 index 000000000..3550a30f2 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 1305e5bf9..000000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: tests - -on: - push: - branches: [master] - pull_request: - -jobs: - tests: - runs-on: ubuntu-latest - steps: - # Setup - - name: Check out source repository - uses: actions/checkout@v2 - - - name: Install Nix - uses: DeterminateSystems/nix-installer-action@v4 - - - name: Setup Nix cache - uses: DeterminateSystems/magic-nix-cache-action@v2 - - # Separates all the cache downloading time from the time required to actually run a step - # of the workflow - - name: Initialize Nix - run: nix develop - - # Unfortunately I can't run 'spago test' in a derivation because it doesn't - # have a mode that ignores the cache. So we run it in a script instead. - # Once we can make this a normal derivation then we can delete this - # workflow file altogether. - - name: Run Spago tests - run: nix develop --command run-tests-script diff --git a/.gitignore b/.gitignore index 0c3248c1d..0330d5d95 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ /output-es /scratch /.vscode +/.temp result @@ -19,4 +20,3 @@ result # Keep it secret, keep it safe. .env -.envrc diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0651395fe..36493fe4d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,9 +39,23 @@ There are three more directories containing code for the registry. Finally, the `flake.nix` file orchestrates builds for the whole repository. +## Running the Registry Server Locally + +The registry server requires a `.env` file and an initialized database. To run the server for development: + +```sh +# 1. Ensure database is initialized (only needed once) +dbmate up + +# 2. Run the server (from the nix shell) +cd app && spago run +``` + +The server will load environment variables from the `.env` file in the project root and run on port 8080 by default. + ## Available Nix Commands -The Registry server can be run locally: +You can also run the packaged registry server: ```sh nix run .#server @@ -81,13 +95,6 @@ There is an integration test that will deploy the registry server and make reque nix build .#checks.x86_64-linux.integration ``` -You can "deploy" the registry server to a local VM and manually hit the API as if it were the production server: - -```sh -# The server will be available at localhost:8080 -nix run -``` - ### Testing Guidelines The PureScript code in the registry is well-tested, ranging from tests for individual functions to full end-to-end tests for the registry server running in a NixOS machine configured the same way as the deployed machine. The smaller and more pure the test, the easier it is to write and maintain; most code is tested via unit tests written with `spec`, and only the core pipelines are run in the integration test. diff --git a/SPEC.md b/SPEC.md index c0f7094c7..423d0d80d 100644 --- a/SPEC.md +++ b/SPEC.md @@ -207,7 +207,7 @@ Note: - Globs you provide at the `includeFiles` and `excludeFiles` keys must contain only `*`, `**`, `/`, `.`, `..`, and characters for Linux file paths. It is not possible to negate a glob (ie. the `!` character), and globs cannot represent a path out of the package source directory. - When packaging your project source, the registry will first "include" your `src` directory and always-included files such as your `purs.json` file. 
Then it will include files which match globs indicated by the `includeFiles` key ([always-ignored files](#always-ignored-files) cannot be included). Finally, it will apply the excluding globs indicated by the `excludeFiles` key to the included files ([always-included files](#always-included-files) cannot be excluded). -- Dependencies you provide at the `dependencies` key must exist in the registry, and the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies). +- Dependencies you provide at the `dependencies` key must exist in the registry, the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies), and relying on undeclared transitive dependencies is not allowed (ie. any modules you import in your code must come from packages listed directly in your dependencies). For example: @@ -234,11 +234,12 @@ For example: All packages in the registry have an associated metadata file, which is located in the `metadata` directory of the `registry` repository under the package name. For example, the metadata for the `aff` package is located at: https://github.com/purescript/registry/blob/main/metadata/aff.json. Metadata files are the source of truth on all published and unpublished versions of a particular package: what their content is and where the package is located. Metadata files are produced by the registry, not by package authors, though they take some information from package manifests. -Each published version of a package records three fields: +Each published version of a package records the following fields: - `hash`: a [`Sha256`](#Sha256) of the compressed archive fetched by the registry for the given version - `bytes`: the size of the tarball in bytes - `publishedTime`: the time the package was published as an `ISO8601` string +- `compilers`: compiler versions this package is known to work with. This field can be in one of two states: a single version indicates that the package worked with a specific compiler on upload but has not yet been tested with all compilers, whereas a non-empty array of versions indicates the package has been tested with all compilers the registry supports. Each unpublished version of a package records three fields: diff --git a/app/default.nix b/app/default.nix deleted file mode 100644 index 2c757bcd8..000000000 --- a/app/default.nix +++ /dev/null @@ -1,160 +0,0 @@ -{ - makeWrapper, - lib, - stdenv, - purs-backend-es-unstable, - esbuild, - writeText, - nodejs, - compilers, - purs-versions, - dhall, - dhall-json, - git, - git-lfs, - licensee, - coreutils, - gzip, - gnutar, - # from the registry at the top level - spago-lock, - package-lock, -}: -let - # Since both the importer and the server share the same build process, we - # don't need to build them twice separately and can share an optimized output - # directory. - shared = stdenv.mkDerivation { - name = "registry-app-shared"; - src = ./.; - phases = [ - "buildPhase" - "installPhase" - ]; - nativeBuildInputs = [ purs-backend-es-unstable ]; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - ln -s ${spago-lock}/output . - echo "Optimizing with purs-backend-es..."
- purs-backend-es build - ''; - installPhase = '' - mkdir $out; - cp -r output-es $out/output; - # This for loop exists because purs-backend-es finds the corefn.json files - # just fine, but doesn't find the foreign.js files. - # I suspect this is because of a quirky interaction between Nix and `copyFile`, - # but I'm not sure how to fix it so we work around it by copying the foreign - # modules by hand. - for dir in output/*/; do - subdir=$(basename "$dir") - if [ -f "output/$subdir/foreign.js" ]; then - cp "output/$subdir/foreign.js" "$out/output/$subdir/" || true; - fi - done - ''; - }; -in -{ - server = stdenv.mkDerivation rec { - name = "registry-server"; - src = ./.; - database = ../db; - nativeBuildInputs = [ - esbuild - makeWrapper - ]; - buildInputs = [ nodejs ]; - entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.App.Server"; - main(); - ''; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - cp -r ${shared}/output . - cp ${entrypoint} entrypoint.js - esbuild entrypoint.js --bundle --outfile=${name}.js --platform=node --packages=external - ''; - installPhase = '' - mkdir -p $out/bin - - echo "Copying files..." - cp ${name}.js $out/${name}.js - ln -s ${package-lock}/js/node_modules $out - - echo "Copying database..." - cp -r ${database} $out/bin/db - - echo "Creating node script..." - echo '#!/usr/bin/env sh' > $out/bin/${name} - echo 'exec ${nodejs}/bin/node '"$out/${name}.js"' "$@"' >> $out/bin/${name} - chmod +x $out/bin/${name} - ''; - postFixup = '' - wrapProgram $out/bin/${name} \ - --set PATH ${ - lib.makeBinPath [ - compilers - purs-versions - dhall - dhall-json - licensee - git - git-lfs - coreutils - gzip - gnutar - ] - } \ - ''; - }; - - github-importer = stdenv.mkDerivation rec { - name = "registry-github-importer"; - src = ./.; - nativeBuildInputs = [ - esbuild - makeWrapper - ]; - buildInputs = [ nodejs ]; - entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.App.GitHubIssue"; - main(); - ''; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - cp -r ${shared}/output . - cp ${entrypoint} entrypoint.js - esbuild entrypoint.js --bundle --outfile=${name}.js --platform=node --packages=external - ''; - installPhase = '' - mkdir -p $out/bin $out - - echo "Copying files..." - cp ${name}.js $out/${name}.js - ln -s ${package-lock}/js/node_modules $out - - echo "Creating node script..." 
- echo '#!/usr/bin/env sh' > $out/bin/${name} - echo 'exec ${nodejs}/bin/node '"$out/${name}.js"' "$@"' >> $out/bin/${name} - chmod +x $out/bin/${name} - ''; - postFixup = '' - wrapProgram $out/bin/${name} \ - --set PATH ${ - lib.makeBinPath [ - compilers - purs-versions - dhall - dhall-json - licensee - git - git-lfs - coreutils - gzip - gnutar - ] - } \ - ''; - }; -} diff --git a/app/fixtures/github-packages/transitive-1.0.0/bower.json b/app/fixtures/github-packages/transitive-1.0.0/bower.json new file mode 100644 index 000000000..d0d4d0bd1 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/bower.json @@ -0,0 +1,12 @@ +{ + "name": "purescript-transitive", + "homepage": "https://github.com/purescript/purescript-transitive", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-transitive.git" + }, + "dependencies": { + "purescript-effect": "^4.0.0" + } +} diff --git a/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs new file mode 100644 index 000000000..71d771f62 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs @@ -0,0 +1,6 @@ +module Transitive where + +import Prelude + +uno :: Int +uno = one diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 0cffc4ab8..965567c83 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -6,6 +6,7 @@ "published": { "6.0.1": { "bytes": 31142, + "compilers": ["0.15.3", "0.15.4", "0.15.5"], "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index 68f250604..b5d5a86ea 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -6,6 +6,7 @@ "published": { "4.0.1": { "bytes": 2184, + "compilers": ["0.15.2", "0.15.3", "0.15.4"], "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 19e09564c..89322d52b 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1,14 +1,19 @@ module Registry.App.API ( AuthenticatedEffects + , COMPILER_CACHE + , CompilerCache(..) 
, PackageSetUpdateEffects , PublishEffects + , _compilerCache , authenticated , copyPackageSourceFiles + , findAllCompilers , formatPursuitResolutions + , installBuildPlan , packageSetUpdate , packagingTeam - , parseInstalledModulePath , publish + , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -16,13 +21,15 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array -import Data.Array.NonEmpty as NEA import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.JSON as CJ +import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record -import Data.DateTime (DateTime) +import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) +import Data.List.NonEmpty as NonEmptyList +import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format @@ -33,7 +40,7 @@ import Data.String.CodeUnits as String.CodeUnits import Data.String.NonEmpty as NonEmptyString import Data.String.Regex as Regex import Effect.Aff as Aff -import Effect.Ref as Ref +import Effect.Unsafe (unsafePerformEffect) import JSON as JSON import Node.ChildProcess.Types (Exit(..)) import Node.FS.Aff as FS.Aff @@ -46,9 +53,12 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Registry.App.Auth as Auth -import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Cache (class FsEncodable, Cache) +import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment (COMMENT) import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) @@ -79,13 +89,14 @@ import Registry.Foreign.FastGlob as FastGlob import Registry.Foreign.Octokit (IssueNumber(..), Team) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) import Registry.Operation as Operation -import Registry.Operation.Validation (UnpublishError(..), validateNoExcludedObligatoryFiles) +import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation import Registry.Owner as Owner import Registry.PackageName as PackageName @@ -94,12 +105,14 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 +import Registry.Solver (CompilerIndex, DependencyIndex, Intersection, SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +import Safe.Coerce as Safe.Coerce type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) @@ -253,7 +266,7 @@ authenticated auth = case 
auth.payload of pure published pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Unpublishing is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -291,7 +304,7 @@ authenticated auth = case auth.payload of Just value -> pure value pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Transferring is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -315,17 +328,21 @@ authenticated auth = case auth.payload of Registry.mirrorLegacyRegistry payload.name payload.newLocation Comment.comment "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be -- | uploaded. If it has been published before then the existing metadata will be -- | updated with the new version. -publish :: forall r. PackageSource -> PublishData -> Run (PublishEffects + r) Unit -publish source payload = do +-- +-- The legacyIndex argument contains the unverified manifests produced by the +-- legacy importer; these manifests can be used on legacy packages to conform +-- them to the registry rule that transitive dependencies are not allowed. +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name - Log.debug $ "Publishing " <> printPackageSource source <> " package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload + Log.debug $ "Publishing package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload Log.debug $ "Verifying metadata..." Metadata existingMetadata <- Registry.readMetadata payload.name >>= case _ of @@ -368,23 +385,28 @@ publish maybeLegacyIndex payload = do -- the package directory along with its detected publish time. Log.debug "Metadata validated. Fetching package source code..." tmp <- Tmp.mkTmpDir - { path: packageDirectory, published: publishedTime } <- Source.fetch source tmp existingMetadata.location payload.ref + { path: downloadedPackage, published: publishedTime } <- Source.fetch tmp existingMetadata.location payload.ref - Log.debug $ "Package downloaded to " <> packageDirectory <> ", verifying it contains a src directory with valid modules..." - Internal.Path.readPursFiles (Path.concat [ packageDirectory, "src" ]) >>= case _ of + Log.debug $ "Package downloaded to " <> downloadedPackage <> ", verifying it contains a src directory with valid modules..."
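+ -- An illustrative sketch (an assumption, not actual registry code) of the layout we expect the fetched package to have at this point: + -- <downloadedPackage>/src/**/*.purs -- required PureScript sources + -- <downloadedPackage>/purs.json or spago.yaml -- optional; a manifest is generated below if absent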
+ Internal.Path.readPursFiles (Path.concat [ downloadedPackage, "src" ]) >>= case _ of Nothing -> Except.throw $ Array.fold [ "This package has no PureScript files in its `src` directory. " , "All package sources must be in the `src` directory, with any additional " , "sources indicated by the `files` key in your manifest." ] - Just files -> do + Just files -> + -- The 'validatePursModules' function uses language-cst-parser, which only + -- supports syntax back to 0.15.0. We'll still try to validate the package + -- but it may fail to parse. Operation.Validation.validatePursModules files >>= case _ of + Left formattedError | payload.compiler < Purs.minLanguageCSTParser -> do + Log.debug $ "Package failed to parse in validatePursModules: " <> formattedError + Log.debug $ "Skipping check because package is published with a pre-0.15.0 compiler (" <> Version.print payload.compiler <> ")." Left formattedError -> Except.throw $ Array.fold [ "This package has either malformed or disallowed PureScript module names " - , "in its `src` directory. All package sources must be in the `src` directory, " - , "with any additional sources indicated by the `files` key in your manifest." + , "in its source: " , formattedError ] Right _ -> @@ -393,13 +415,18 @@ publish source payload = do -- If the package doesn't have a purs.json we can try to make one - possible scenarios: -- - in case it has a spago.yaml then we know how to read that, and have all the info to move forward -- - if it's a legacy import then we can try to infer as much info as possible to make a manifest - let packagePursJson = Path.concat [ packageDirectory, "purs.json" ] + let packagePursJson = Path.concat [ downloadedPackage, "purs.json" ] hadPursJson <- Run.liftEffect $ FS.Sync.exists packagePursJson - let packageSpagoYaml = Path.concat [ packageDirectory, "spago.yaml" ] + let packageSpagoYaml = Path.concat [ downloadedPackage, "spago.yaml" ] hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml - Manifest manifest <- + address <- case existingMetadata.location of + Git _ -> Except.throw "Packages can only come from GitHub for now." + GitHub { subdir: Just subdir } -> Except.throw $ "Packages cannot yet use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." + GitHub { owner, repo } -> pure { owner, repo } + + Manifest receivedManifest <- if hadPursJson then Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of Left error -> do @@ -424,19 +451,15 @@ publish source payload = do Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do Comment.comment $ Array.fold - [ "Converted your spago.yaml into a purs.json manifest to use for publishing:\n" - , "```json" + [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```" + , "\n```\n" ] pure manifest else do Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." - address <- case existingMetadata.location of - Git _ -> Except.throw "Legacy packages can only come from GitHub." - GitHub { subdir: Just subdir } -> Except.throw $ "Legacy packages cannot use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." 
- GitHub { owner, repo } -> pure { owner, repo } version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -453,60 +476,69 @@ publish maybeLegacyIndex payload = do Log.debug $ "Successfully produced a legacy manifest from the package source." let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest Comment.comment $ Array.fold - [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:\n" - , "```json" + [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```" + , "\n```\n" ] pure manifest -- We trust the manifest for any changes to the 'owners' field, but for all -- other fields we trust the registry metadata. - let metadata = existingMetadata { owners = manifest.owners } - unless (Operation.Validation.nameMatches (Manifest manifest) payload) do + let metadata = existingMetadata { owners = receivedManifest.owners } + unless (Operation.Validation.nameMatches (Manifest receivedManifest) payload) do Except.throw $ Array.fold [ "The manifest file specifies a package name (" - , PackageName.print manifest.name + , PackageName.print receivedManifest.name , ") that differs from the package name submitted to the API (" , PackageName.print payload.name , "). The manifest and API request must match." ] - unless (Operation.Validation.locationMatches (Manifest manifest) (Metadata metadata)) do - Except.throw $ Array.fold - [ "The manifest file specifies a location (" - , stringifyJson Location.codec manifest.location - , ") that differs from the location in the registry metadata (" - , stringifyJson Location.codec metadata.location - , "). If you would like to change the location of your package you should " - , "submit a transfer operation." - ] + unless (Operation.Validation.locationMatches (Manifest receivedManifest) (Metadata metadata)) do + if isJust maybeLegacyIndex then + -- The legacy importer is sometimes run on older packages, some of which have been transferred. Since + -- package metadata only records the latest location, this can cause a problem: the manifest reports + -- the location at the time, but the metadata reports the current location. + Log.warn $ Array.fold + [ "Running in legacy mode and the manifest location differs from the existing metadata. This indicates a package that was " + , "transferred from a previous location. Ignoring location match validation..." + ] + else + Except.throw $ Array.fold + [ "The manifest file specifies a location (" + , stringifyJson Location.codec receivedManifest.location + , ") that differs from the location in the registry metadata (" + , stringifyJson Location.codec metadata.location + , "). If you would like to change the location of your package you should " + , "submit a transfer operation." + ] - when (Operation.Validation.isMetadataPackage (Manifest manifest)) do + when (Operation.Validation.isMetadataPackage (Manifest receivedManifest)) do Except.throw "The `metadata` package cannot be uploaded to the registry because it is a protected package."
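+ -- A minimal sketch of the two manifest checks above, assuming simple field equality (the actual Operation.Validation definitions may differ): + -- nameMatches (Manifest m) payload = m.name == payload.name + -- locationMatches (Manifest m) (Metadata meta) = m.location == meta.location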
- for_ (Operation.Validation.isNotUnpublished (Manifest manifest) (Metadata metadata)) \info -> do + for_ (Operation.Validation.isNotUnpublished (Manifest receivedManifest) (Metadata metadata)) \info -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that has been unpublished: " <> Version.print manifest.version + [ "You tried to upload a version that has been unpublished: " <> Version.print receivedManifest.version , "" , "```json" , printJson Metadata.unpublishedMetadataCodec info , "```" ] - case Operation.Validation.isNotPublished (Manifest manifest) (Metadata metadata) of + case Operation.Validation.isNotPublished (Manifest receivedManifest) (Metadata metadata) of -- If the package has been published already, then we check whether the published -- version has made it to Pursuit or not. If it has, then we terminate here. If -- it hasn't then we publish to Pursuit and then terminate. Just info -> do - published <- Pursuit.getPublishedVersions manifest.name >>= case _ of + published <- Pursuit.getPublishedVersions receivedManifest.name >>= case _ of Left error -> Except.throw error Right versions -> pure versions - case Map.lookup manifest.version published of + case Map.lookup receivedManifest.version published of Just url -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that already exists: " <> Version.print manifest.version + [ "You tried to upload a version that already exists: " <> Version.print receivedManifest.version , "" , "Its metadata is:" , "```json" @@ -517,300 +549,272 @@ publish source payload = do , url ] + Nothing | payload.compiler < Purs.minPursuitPublish -> do + Comment.comment $ Array.fold + [ "This version has already been published to the registry, but the docs have not been " + , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " + , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish + , ". Please try with a later compiler." + ] + Nothing -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - compilationResult <- compilePackage { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } + compilerIndex <- readCompilerIndex + verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + let installedResolutions = Path.concat [ tmp, ".registry" ] + installBuildPlan verifiedResolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just downloadedPackage + } case compilationResult of - Left error -> do + Left compileFailure -> do + let error = printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." - Right dependenciesDir -> do + Right _ -> do Log.debug "Uploading to Pursuit" -- While we have created a manifest from the package source, we -- still need to ensure a purs.json file exists for 'purs publish'. 
unless hadPursJson do - existingManifest <- ManifestIndex.readManifest manifest.name manifest.version + existingManifest <- ManifestIndex.readManifest receivedManifest.name receivedManifest.version case existingManifest of Nothing -> Except.throw "Version was previously published, but we could not find a purs.json file in the package source, and no existing manifest was found in the registry." Just existing -> Run.liftAff $ writeJsonFile Manifest.codec packagePursJson existing - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of + Left publishErr -> Except.throw publishErr + Right _ -> do + FS.Extra.remove tmp + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" -- In this case the package version has not been published, so we proceed -- with ordinary publishing. - Nothing -> - -- Now that we've verified the package we can write the manifest to the source - -- directory and then publish it. - if hadPursJson then do - -- No need to verify the generated manifest because nothing was generated, - -- and no need to write a file (it's already in the package source.) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - - else if hasSpagoYaml then do - -- We need to write the generated purs.json file, but because spago-next - -- already does unused dependency checks and supports explicit test-only - -- dependencies we can skip those checks. - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } + Nothing -> do + Log.info "Verifying the package build plan..." + compilerIndex <- readCompilerIndex + validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + + Comment.comment "Verifying unused and/or missing dependencies..." + + -- First we install the resolutions and call 'purs graph' to adjust the + -- manifest as needed, but we defer compilation until after this check + -- in case the package manifest and resolutions are adjusted. + let installedResolutions = Path.concat [ tmp, ".registry" ] + installBuildPlan validatedResolutions installedResolutions + + let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] + let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] + let pursGraph = Purs.Graph { globs: [ srcGlobs, depGlobs ] } + + -- We need to use the minimum compiler version that supports 'purs graph'. + let pursGraphCompiler = if payload.compiler >= Purs.minPursGraph then payload.compiler else Purs.minPursGraph + + -- In this step we run 'purs graph' to get a graph of the package source + -- and installed dependencies and use that to determine if the manifest + -- contains any unused or missing dependencies. If it does and is a legacy + -- manifest then we fix it and return the result. If it does and is a modern + -- manifest (spago.yaml, purs.json, etc.) then we reject it. If it doesn't + -- then we simply return the manifest and resolutions we already had.
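+ -- For example (hypothetical package): if a legacy manifest lists only 'effect' but the source also imports Data.Maybe from 'maybe', the check below reports 'maybe' as a missing dependency; with a legacy index we can conform the manifest, while a modern manifest would be rejected.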
+ Tuple manifest resolutions <- Run.liftAff (Purs.callCompiler { command: pursGraph, version: Just pursGraphCompiler, cwd: Nothing }) >>= case _ of + Left err -> case err of + UnknownError str -> Except.throw str + MissingCompiler -> Except.throw $ "Missing compiler " <> Version.print pursGraphCompiler + CompilationError errs -> do + Log.warn $ Array.fold + [ "Failed to discover unused dependencies because purs graph failed:\n" + , Purs.printCompilerErrors errs + ] + -- The purs graph command will fail if the source code uses syntax + -- before the oldest usable purs graph compiler (ie. 0.14.0). In + -- this case we simply accept the dependencies as-is, even though + -- they could technically violate Registry rules around missing and + -- unused dependencies. This only affects old packages and we know + -- they compile, so we've decided it's an acceptable exception. + pure $ Tuple (Manifest receivedManifest) validatedResolutions + Right output -> case JSON.parse output of + Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr + Right json -> case CJ.decode PursGraph.pursGraphCodec json of + Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CJ.DecodeError.print decodeErr + Right graph -> do + Log.debug "Got a valid graph of source and dependencies." + let + pathParser path = map _.name $ case String.stripPrefix (String.Pattern installedResolutions) path of + Just trimmed -> parseModulePath trimmed + Nothing -> case String.stripPrefix (String.Pattern downloadedPackage) path of + Just _ -> Right { name: receivedManifest.name, version: receivedManifest.version } + Nothing -> Left $ "Failed to parse module path " <> path <> " because it is not in the package source or installed dependencies." + + case Operation.Validation.noTransitiveOrMissingDeps (Manifest receivedManifest) graph pathParser of + -- Association failures should always throw + Left (Left assocErrors) -> + Except.throw $ Array.fold + [ "Failed to validate unused / missing dependencies because modules could not be associated with package names:" + , flip NonEmptyArray.foldMap1 assocErrors \{ error, module: ModuleName moduleName, path } -> + "\n - " <> moduleName <> " (" <> path <> "): " <> error + ] + + -- FIXME: For now we attempt to fix packages if a legacy index + -- is provided (ie. the publish is via the importer) but we + -- should at some point make this a hard error. + Left (Right depError) -> case maybeLegacyIndex of + Nothing -> + Except.throw $ "Failed to validate unused / missing dependencies: " <> Operation.Validation.printValidateDepsError depError + Just legacyIndex -> do + Log.info $ "Found fixable dependency errors: " <> Operation.Validation.printValidateDepsError depError + conformLegacyManifest (Manifest receivedManifest) payload.compiler compilerIndex legacyIndex depError + + -- If the check passes then we can simply return the manifest and + -- resolutions. + Right _ -> pure $ Tuple (Manifest receivedManifest) validatedResolutions + + -- Now that we've verified the package we can write the manifest to the + -- source directory. 
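+ -- Note that if the dependencies were conformed above, the purs.json written here is the corrected manifest, so the packaged tarball ships the conformed dependencies rather than the ones originally found in the source.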
+ Run.liftAff $ writeJsonFile Manifest.codec packagePursJson manifest + + Log.info "Creating packaging directory" + let packageDirname = PackageName.print receivedManifest.name <> "-" <> Version.print receivedManifest.version + let packageSource = Path.concat [ tmp, packageDirname ] + FS.Extra.ensureDirectory packageSource + -- We copy over all files that are always included (ie. src dir, purs.json file), + -- and any files the user asked for via the 'files' key, and remove all files + -- that should never be included (even if the user asked for them). + copyPackageSourceFiles { includeFiles: receivedManifest.includeFiles, excludeFiles: receivedManifest.excludeFiles, source: downloadedPackage, destination: packageSource } + removeIgnoredTarballFiles packageSource + + -- Now that we have the package source contents we can verify we can compile + -- the package with exactly what is going to be uploaded. + Comment.comment $ Array.fold + [ "Verifying package compiles using compiler " + , Version.print payload.compiler + , " and resolutions:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Version.codec) resolutions + , "\n```" + ] - -- Otherwise this is a legacy package, generated from a combination of bower, - -- spago.dhall, and package set files, so we need to verify the generated - -- manifest does not contain unused dependencies before writing it. + -- We clear the installation directory so that no old installed resolutions + -- stick around. + Run.liftAff $ FS.Extra.remove installedResolutions + installBuildPlan resolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just tmp + } + + case compilationResult of + Left compileFailure -> do + let error = printCompilerFailure payload.compiler compileFailure + Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error + Right _ -> pure unit + + Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..." + let tarballName = packageDirname <> ".tar.gz" + let tarballPath = Path.concat [ tmp, tarballName ] + Tar.create { cwd: tmp, folderName: packageDirname } + + Log.info "Tarball created. Verifying its size..." + bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath + for_ (Operation.Validation.validateTarballSize bytes) case _ of + Operation.Validation.ExceedsMaximum maxPackageBytes -> + Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." + Operation.Validation.WarnPackageSize maxWarnBytes -> + Comment.comment $ "WARNING: Package tarball is " <> show bytes <> " bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." + + -- If a package has under ~30 bytes it's about guaranteed that packaging the + -- tarball failed. This can happen if the system running the API has a non- + -- GNU tar installed, for example. + let minBytes = 30.0 + when (bytes < minBytes) do + Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." + + hash <- Sha256.hashFile tarballPath + Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable."
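+ -- The hash printed below uses the same Sha256 format recorded in package metadata, e.g. "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=" in the prelude fixture above.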
+ Log.info $ "Tarball hash: " <> Sha256.print hash + + Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath + Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." + let newPublishedVersion = { hash, ref: payload.ref, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } + let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) + Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + + -- We write to the registry index if possible. If this fails, the packaging + -- team should manually insert the entry. + Log.debug "Adding the new version to the registry index" + Registry.writeManifest manifest + + Registry.mirrorLegacyRegistry payload.name newMetadata.location + Comment.comment "Mirrored registry operation to the legacy registry!" + + Log.debug "Uploading package documentation to Pursuit" + if payload.compiler >= Purs.minPursuitPublish then + -- TODO: We must use the 'downloadedPackage' instead of 'packageSource' + -- because Pursuit requires a git repository, and our tarball directory + -- is not one. This should be changed once Pursuit no longer needs git. + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of + Left publishErr -> do + Log.error publishErr + Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Right _ -> + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Log.debug "Pruning unused dependencies from legacy package manifest..." - - Log.debug "Solving manifest to get all transitive dependencies." - resolutions <- verifyResolutions (Manifest manifest) payload.resolutions - - Log.debug "Installing dependencies." - tmpDepsDir <- Tmp.mkTmpDir - installBuildPlan resolutions tmpDepsDir - - Log.debug "Discovering used dependencies from source." - let srcGlobs = Path.concat [ packageDirectory, "src", "**", "*.purs" ] - let depGlobs = Path.concat [ tmpDepsDir, "*", "src", "**", "*.purs" ] - let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] } - -- We need to use the minimum compiler version that supports 'purs graph' - let minGraphCompiler = unsafeFromRight (Version.parse "0.13.8") - let callCompilerVersion = if payload.compiler >= minGraphCompiler then payload.compiler else minGraphCompiler - Run.liftAff (Purs.callCompiler { command, version: Just callCompilerVersion, cwd: Nothing }) >>= case _ of - Left err -> do - let prefix = "Failed to discover unused dependencies because purs graph failed: " - Log.error $ prefix <> case err of - UnknownError str -> str - CompilationError errs -> Purs.printCompilerErrors errs - MissingCompiler -> "missing compiler " <> Version.print payload.compiler - -- We allow legacy packages through even if we couldn't run purs graph, - -- because we can't be sure we chose the correct compiler version. - if source == LegacyPackage then - Comment.comment "Failed to prune dependencies for legacy package, continuing anyway..." - else do - Except.throw "purs graph failed; cannot verify unused dependencies." 
- Right output -> case JSON.parse output of - Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr - Right json -> case CJ.decode PursGraph.pursGraphCodec json of - Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CJ.DecodeError.print decodeErr - Right graph -> do - Log.debug "Got a valid graph of source and dependencies. Removing install dir and associating discovered modules with their packages..." - FS.Extra.remove tmpDepsDir - - let - -- We need access to a graph that _doesn't_ include the package - -- source, because we only care about dependencies of the package. - noSrcGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmpDepsDir, path: _ } - - case PursGraph.associateModules pathParser noSrcGraph of - Left errs -> - Except.throw $ String.joinWith "\n" - [ "Failed to associate modules with packages while finding unused dependencies:" - , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } -> - " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n" - ] - Right modulePackageMap -> do - Log.debug "Associated modules with their package names. Finding all modules used in package source..." - -- The modules used in the package source code are any that have - -- a path beginning with the package source directory. We only - -- care about dependents of these modules. - let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - Log.debug "Found all modules used in package source. Finding all modules used by those modules..." - let allReachableModules = PursGraph.allDependenciesOf sourceModules graph - - -- Then we can associate each reachable module with its package - -- name to get the full set of used package names. - let allUsedPackages = Set.mapMaybe (flip Map.lookup modulePackageMap) allReachableModules - - -- Finally, we can use this to find the unused dependencies. - Log.debug "Found all packages reachable by the project source code. Determining unused dependencies..." - case Operation.Validation.getUnusedDependencies (Manifest manifest) resolutions allUsedPackages of - Nothing -> do - Log.debug "No unused dependencies! This manifest is good to go." - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - Just isUnused -> do - let printed = String.joinWith ", " (PackageName.print <$> NonEmptySet.toUnfoldable isUnused) - Log.debug $ "Found unused dependencies: " <> printed - Comment.comment $ "Generated legacy manifest contains unused dependencies which will be removed: " <> printed - let verified = manifest { dependencies = Map.filterKeys (not <<< flip NonEmptySet.member isUnused) manifest.dependencies } - Log.debug "Writing updated, pruned manifest." 
- Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest verified) - publishRegistry - { source - , manifest: Manifest verified - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - -type PublishRegistry = - { source :: PackageSource - , manifest :: Manifest - , metadata :: Metadata - , payload :: PublishData - , publishedTime :: DateTime - , tmp :: FilePath - , packageDirectory :: FilePath - } + Comment.comment $ Array.fold + [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" + , Version.print payload.compiler + , "). If you want to publish documentation, please try again with a later compiler." + ] --- A private helper function for publishing to the registry. Separated out of --- the main 'publish' function because we sometimes use the publish function to --- publish to Pursuit only (in the case the package has been pushed to the --- registry, but docs have not been uploaded). -publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit -publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do - Log.debug "Verifying the package build plan..." - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - - Log.debug "Verifying that the package dependencies are all registered..." - unregisteredRef <- Run.liftEffect $ Ref.new Map.empty - forWithIndex_ verifiedResolutions \name version -> do - Registry.readMetadata name >>= case _ of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just (Metadata { published }) -> case Map.lookup version published of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just _ -> pure unit - unregistered <- Run.liftEffect $ Ref.read unregisteredRef - unless (Map.isEmpty unregistered) do - Except.throw $ Array.fold - [ "Cannot register this package because it has unregistered dependencies: " - , Array.foldMap (\(Tuple name version) -> "\n - " <> formatPackageVersion name version) (Map.toUnfoldable unregistered) - ] + Comment.comment "Determining all valid compiler versions for this package..." + allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } - Log.info "Packaging tarball for upload..." - let newDir = PackageName.print manifest.name <> "-" <> Version.print manifest.version - let packageSourceDir = Path.concat [ tmp, newDir ] - Log.debug $ "Creating packaging directory at " <> packageSourceDir - FS.Extra.ensureDirectory packageSourceDir - -- We copy over all files that are always included (ie. src dir, purs.json file), - -- and any files the user asked for via the 'files' key, and remove all files - -- that should never be included (even if the user asked for them). - copyPackageSourceFiles { includeFiles: manifest.includeFiles, excludeFiles: manifest.excludeFiles, source: packageDirectory, destination: packageSourceDir } - Log.debug "Removing always-ignored files from the packaging directory." 
- removeIgnoredTarballFiles packageSourceDir - - let tarballName = newDir <> ".tar.gz" - let tarballPath = Path.concat [ tmp, tarballName ] - Tar.create { cwd: tmp, folderName: newDir } - - Log.info "Tarball created. Verifying its size..." - bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath - for_ (Operation.Validation.validateTarballSize bytes) case _ of - Operation.Validation.ExceedsMaximum maxPackageBytes -> - Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." - Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." - - -- If a package has under ~30 bytes it's about guaranteed that packaging the - -- tarball failed. This can happen if the system running the API has a non- - -- GNU tar installed, for example. - let minBytes = 30.0 - when (bytes < minBytes) do - Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." - - hash <- Sha256.hashFile tarballPath - Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable." - Log.info $ "Tarball hash: " <> Sha256.print hash - - -- Now that we have the package source contents we can verify we can compile - -- the package. We skip failures when the package is a legacy package. - Log.info "Verifying package compiles (this may take a while)..." - compilationResult <- compilePackage - { packageSourceDir: packageDirectory - , compiler: payload.compiler - , resolutions: verifiedResolutions - } + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - case compilationResult of - Left error - -- We allow legacy packages to fail compilation because we do not - -- necessarily know what compiler to use with them. - | source == LegacyPackage -> do - Log.debug error - Log.warn "Failed to compile, but continuing because this package is a legacy package." - | otherwise -> - Except.throw error - Right _ -> - pure unit - - Comment.comment "Package is verified! Uploading it to the storage backend..." - Storage.upload manifest.name manifest.version tarballPath - Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file." - let newMetadata = metadata { published = Map.insert manifest.version { hash, ref: payload.ref, publishedTime, bytes } metadata.published } - Registry.writeMetadata manifest.name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" - - -- After a package has been uploaded we add it to the registry index, we - -- upload its documentation to Pursuit, and we can now process it for package - -- sets when the next batch goes out. - - -- We write to the registry index if possible. If this fails, the packaging - -- team should manually insert the entry. - Registry.writeManifest (Manifest manifest) - - when (source == CurrentPackage) $ case compilationResult of - Left error -> do - Log.error $ "Compilation failed, cannot upload to pursuit: " <> error - Except.throw "Cannot publish to Pursuit because this package failed to compile." 
- Right dependenciesDir -> do - Log.debug "Uploading to Pursuit" - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } - - Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry." + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } + Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + + Comment.comment "Wrote completed metadata to the registry!" + FS.Extra.remove tmp -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the -- | manifest. If not, we solve their manifest to produce a build plan. -verifyResolutions :: forall r. Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + EXCEPT String + r) (Map PackageName Version) -verifyResolutions manifest resolutions = do +verifyResolutions :: forall r. CompilerIndex -> Version -> Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + AFF + EXCEPT String + r) (Map PackageName Version) +verifyResolutions compilerIndex compiler manifest resolutions = do Log.debug "Check the submitted build plan matches the manifest" - manifestIndex <- Registry.readAllManifests case resolutions of - Nothing -> case Operation.Validation.validateDependenciesSolve manifest manifestIndex of - Left errors -> do - let - printedError = String.joinWith "\n" - [ "Could not produce valid dependencies for manifest." - , "```" - , errors # foldMapWithIndex \index error -> String.joinWith "\n" - [ "[Error " <> show (index + 1) <> "]" - , Solver.printSolverError error - ] - , "```" - ] - Except.throw printedError - Right solved -> pure solved + Nothing -> do + case Operation.Validation.validateDependenciesSolve compiler manifest compilerIndex of + Left errors -> do + let + printedError = String.joinWith "\n" + [ "Could not produce valid dependencies for manifest." + , "```" + , errors # foldMapWithIndex \index error -> String.joinWith "\n" + [ "[Error " <> show (index + 1) <> "]" + , Solver.printSolverError error + ] + , "```" + ] + Except.throw printedError + Right solved -> pure solved Just provided -> do validateResolutions manifest provided pure provided @@ -860,61 +864,88 @@ validateResolutions manifest resolutions = do , incorrectVersionsError ] -type CompilePackage = - { packageSourceDir :: FilePath - , compiler :: Version - , resolutions :: Map PackageName Version +type FindAllCompilersResult = + { failed :: Map Version (Either SolverErrors CompilerFailure) + , succeeded :: Set Version } -compilePackage :: forall r. 
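+ -- If the user provided explicit resolutions we skip solving entirely and only validate the provided versions against the manifest ranges below.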
CompilePackage -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either String FilePath) -compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do - tmp <- Tmp.mkTmpDir - let dependenciesDir = Path.concat [ tmp, ".registry" ] - FS.Extra.ensureDirectory dependenciesDir - - let - globs = - if Map.isEmpty resolutions then - [ "src/**/*.purs" ] - else - [ "src/**/*.purs" - , Path.concat [ dependenciesDir, "*/src/**/*.purs" ] - ] - - Log.debug "Installing build plan..." - installBuildPlan resolutions dependenciesDir - - Log.debug "Compiling..." - compilerOutput <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs } - , version: Just compiler - , cwd: Just packageSourceDir - } - - case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed for your package due to a compiler error:" - , "```" - , err - , "```" - ] - Right _ -> pure dependenciesDir +-- | Find all compilers that can compile the package source code and installed +-- | resolutions from the given array of compilers. +findAllCompilers + :: forall r + . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } + -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult +findAllCompilers { source, manifest, compilers } = do + compilerIndex <- readCompilerIndex + checkedCompilers <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target + case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of + Left solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + pure $ Left $ Tuple target (Left solverErrors) + Right (Tuple mbCompiler resolutions) -> do + Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions + case mbCompiler of + Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" + Just selected | selected /= target -> Except.throw $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print selected + , ") that differs from the target compiler (" + , Version.print target + , ")." 
+          ]
+        Just _ -> pure unit
+      Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of
+        Nothing -> do
+          Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target
+          workdir <- Tmp.mkTmpDir
+          let installed = Path.concat [ workdir, ".registry" ]
+          FS.Extra.ensureDirectory installed
+          installBuildPlan resolutions installed
+          result <- Run.liftAff $ Purs.callCompiler
+            { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] }
+            , version: Just target
+            , cwd: Just workdir
+            }
+          FS.Extra.remove workdir
+          case result of
+            Left err -> do
+              Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err
+            Right _ -> do
+              Log.debug $ "Compilation succeeded with compiler " <> Version.print target
+          Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result }
+          pure $ bimap (Tuple target <<< Right) (const target) result
+        Just { result } ->
+          pure $ bimap (Tuple target <<< Right) (const target) result
+
+  let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers
+  pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success }
+
+printCompilerFailure :: Version -> CompilerFailure -> String
+printCompilerFailure compiler = case _ of
+  MissingCompiler -> Array.fold
+    [ "Compilation failed because the build plan compiler version "
+    , Version.print compiler
+    , " is not supported. Please try again with a different compiler."
+    ]
+  CompilationError errs -> String.joinWith "\n"
+    [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:"
+    , "```"
+    , Purs.printCompilerErrors errs
+    , "```"
+    ]
+  UnknownError err -> String.joinWith "\n"
+    [ "Compilation failed with version " <> Version.print compiler <> " because of an error:"
+    , "```"
+    , err
+    , "```"
+    ]
 
 -- | Install all dependencies indicated by the build plan to the specified
 -- | directory. Packages will be installed at 'dir/package-name-x.y.z'.
 installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit
 installBuildPlan resolutions dependenciesDir = do
+  Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir
   -- We fetch every dependency at its resolved version, unpack the tarball, and
   -- store the resulting source code in a specified directory for dependencies.
   forWithIndex_ resolutions \name version -> do
@@ -934,11 +965,10 @@ installBuildPlan resolutions dependenciesDir = do
   Log.debug $ "Installed " <> formatPackageVersion name version
 
 -- | Parse the name and version from a path to a module installed in the standard
--- | form: '<dir>/<name>-<version>/...'
-parseInstalledModulePath :: { prefix :: FilePath, path :: FilePath } -> Either String { name :: PackageName, version :: Version }
-parseInstalledModulePath { prefix, path } = do
+-- | form: '<name>-<version>/...'
+parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version }
+parseModulePath path = do
   packageVersion <- lmap Parsing.parseErrorMessage $ Parsing.runParser path do
-    _ <- Parsing.String.string prefix
     _ <- Parsing.Combinators.optional (Parsing.Combinators.try (Parsing.String.string Path.sep))
     Tuple packageVersionChars _ <- Parsing.Combinators.Array.manyTill_ Parsing.String.anyChar (Parsing.String.string Path.sep)
     pure $ String.CodeUnits.fromCharArray (Array.fromFoldable packageVersionChars)
@@ -955,38 +985,40 @@ parseInstalledModulePath { prefix, path } = do
   pure { name, version }
 
 type PublishToPursuit =
-  { packageSourceDir :: FilePath
-  , dependenciesDir :: FilePath
+  { source :: FilePath
   , compiler :: Version
   , resolutions :: Map PackageName Version
+  , installedResolutions :: FilePath
   }
 
 -- | Publishes a package to Pursuit.
 -- |
 -- | ASSUMPTIONS: This function should not be run on legacy packages or on
--- | packages where the `purescript-` prefix is still present.
+-- | packages where the `purescript-` prefix is still present. Cannot be used
+-- | on packages prior to 'Purs.minPursuitPublish'.
 publishToPursuit
   :: forall r
    . PublishToPursuit
-  -> Run (PURSUIT + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) Unit
-publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = do
+  -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit)
+publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do
   Log.debug "Generating a resolutions file"
   tmp <- Tmp.mkTmpDir
 
+  when (compiler < Purs.minPursuitPublish) do
+    Except.throw $ "Cannot publish to Pursuit because this package was published with a pre-0.14.7 compiler (" <> Version.print compiler <> "). If you want to publish documentation, please try again with a later compiler."
+
   let
-    resolvedPaths = formatPursuitResolutions { resolutions, dependenciesDir }
+    resolvedPaths = formatPursuitResolutions { resolutions, installedResolutions }
     resolutionsFilePath = Path.concat [ tmp, "resolutions.json" ]
 
   Run.liftAff $ writeJsonFile pursuitResolutionsCodec resolutionsFilePath resolvedPaths
 
   -- The 'purs publish' command requires a clean working tree, but it isn't
-  -- guaranteed that packages have an adequate .gitignore file; compilers prior
-  -- to 0.14.7 did not ignore the purs.json file when publishing. So we stash
-  -- changes made during the publishing process (ie. inclusion of a new purs.json
-  -- file and an output directory from compilation) before calling purs publish.
+  -- guaranteed that packages have an adequate .gitignore file. So we stash
+  -- changes made during the publishing process before calling publish.
   -- https://git-scm.com/docs/gitignore
 
   Log.debug "Adding output and purs.json to local git excludes..."
- Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ packageSourceDir, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) + Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ source, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) -- NOTE: The compatibility version of purs publish appends 'purescript-' to the -- package name in the manifest file: @@ -997,27 +1029,12 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = compilerOutput <- Run.liftAff $ Purs.callCompiler { command: Purs.Publish { resolutions: resolutionsFilePath } , version: Just compiler - , cwd: Just packageSourceDir + , cwd: Just source } publishJson <- case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Publishing failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed for your package due to an unknown compiler error:" - , "```" - , err - , "```" - ] + Left error -> + Except.throw $ printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1039,7 +1056,7 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = Left error -> Except.throw $ "Could not publish your package to Pursuit because an error was encountered (cc: @purescript/packaging): " <> error Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + FS.Extra.remove tmp type PursuitResolutions = Map RawPackageName { version :: Version, path :: FilePath } @@ -1050,13 +1067,13 @@ pursuitResolutionsCodec = rawPackageNameMapCodec $ CJ.named "Resolution" $ CJ.Re -- -- Note: This interfaces with Pursuit, and therefore we must add purescript- -- prefixes to all package names for compatibility with the Bower naming format. -formatPursuitResolutions :: { resolutions :: Map PackageName Version, dependenciesDir :: FilePath } -> PursuitResolutions -formatPursuitResolutions { resolutions, dependenciesDir } = +formatPursuitResolutions :: { resolutions :: Map PackageName Version, installedResolutions :: FilePath } -> PursuitResolutions +formatPursuitResolutions { resolutions, installedResolutions } = Map.fromFoldable do Tuple name version <- Map.toUnfoldable resolutions let bowerPackageName = RawPackageName ("purescript-" <> PackageName.print name) - packagePath = Path.concat [ dependenciesDir, PackageName.print name <> "-" <> Version.print version ] + packagePath = Path.concat [ installedResolutions, PackageName.print name <> "-" <> Version.print version ] [ Tuple bowerPackageName { path: packagePath, version } ] -- | Copy files from the package source directory to the destination directory @@ -1163,3 +1180,183 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } + +readCompilerIndex :: forall r. 
Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + +type AdjustManifest = + { source :: FilePath + , compiler :: Version + , manifest :: Manifest + , legacyIndex :: Maybe DependencyIndex + , currentIndex :: CompilerIndex + , resolutions :: Maybe (Map PackageName Version) + } + +-- | Conform a legacy manifest to the Registry requirements for dependencies, +-- | ie. that all direct imports are listed (no transitive dependencies) and no +-- | unused dependencies are listed. +conformLegacyManifest + :: forall r + . Manifest + -> Version + -> CompilerIndex + -> Solver.TransitivizedRegistry + -> ValidateDepsError + -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) +conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do + let + manifestRequired :: SemigroupMap PackageName Intersection + manifestRequired = Solver.initializeRequired manifest.dependencies + + legacyResolutions <- case Solver.solveFull { registry: legacyRegistry, required: manifestRequired } of + Left unsolvableLegacy -> do + Log.warn $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableLegacy + case Solver.solveWithCompiler (Range.exact compiler) currentIndex manifest.dependencies of + Left unsolvableCurrent -> Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableCurrent + Right (Tuple _ solved) -> do + Log.debug $ "Got current resolutions as a fallback to unsolvable legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + Right solved -> do + Log.debug $ "Got legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + + let + legacyTransitive :: Map PackageName Range + legacyTransitive = + Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + $ Safe.Coerce.coerce + $ _.required + $ Solver.solveSteps + $ Solver.solveSeed + $ Solver.withReachable { registry: legacyRegistry, required: manifestRequired } + + Log.debug $ "Got transitive solution:\n" <> printJson (Internal.Codec.packageMap Range.codec) legacyTransitive + + let + associateMissing :: Array PackageName -> Map PackageName Range + associateMissing packages = do + -- First we look up the package in the produced transitive ranges, as those + -- are the most likely to be correct. + let associateTransitive pkg = maybe (Left pkg) (\range -> Right (Tuple pkg range)) (Map.lookup pkg legacyTransitive) + let associated = partitionEithers (map associateTransitive packages) + let foundFromTransitive = Map.fromFoldable associated.success + + -- If not found, we search for the ranges described for this dependency + -- in the manifests of the packages in the resolutions. 
+ let + resolutionRanges :: Map PackageName Range + resolutionRanges = do + let + foldFn name prev version = fromMaybe prev do + versions <- Map.lookup name (un SemigroupMap legacyRegistry) + deps <- Map.lookup version (un SemigroupMap versions) + let deps' = Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) (un SemigroupMap deps) + pure $ Map.unionWith (\l r -> fromMaybe l (Range.intersect l r)) prev deps' + + foldlWithIndex foldFn Map.empty legacyResolutions + + foundFromResolutions :: Map PackageName Range + foundFromResolutions = Map.fromFoldable do + associated.fail # Array.mapMaybe \pkg -> map (Tuple pkg) (Map.lookup pkg resolutionRanges) + + Map.union foundFromTransitive foundFromResolutions + + fixUnused names (Manifest m) = do + let unused = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) names + let fixedDependencies = Map.difference m.dependencies unused + case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of + Left unsolvable -> do + Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved + + fixMissing names (Manifest m) = do + let fixedDependencies = Map.union m.dependencies (associateMissing (NonEmptySet.toUnfoldable names)) + -- Once we've fixed the missing dependencies we need to be sure we can still + -- produce a viable solution with the current index. + case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of + Left unsolvable -> do + Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved + + previousDepsMessage = Array.fold + [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add direct-imported ones. 
" + , "Your dependency list was:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + , "\n```\n" + ] + + newDepsMessage (Manifest new) = Array.fold + [ "\nYour new dependency list is:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) new.dependencies + , "\n```\n" + ] + + case problem of + UnusedDependencies names -> do + Tuple deps resolutions <- fixUnused names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + MissingDependencies names -> do + Tuple deps resolutions <- fixMissing names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + UnusedAndMissing { missing, unused } -> do + let unused' = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) unused + let trimmed = Map.difference manifest.dependencies unused' + Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) + let newManifest = Manifest (manifest { dependencies = newDeps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" + , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest newResolutions + +type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) + +_compilerCache :: Proxy "compilerCache" +_compilerCache = Proxy + +data CompilerCache :: (Type -> Type -> Type) -> Type -> Type +data CompilerCache c a = Compilation Manifest (Map PackageName Version) Version (c { target :: Version, result :: Either CompilerFailure Unit } a) + +instance Functor2 c => Functor (CompilerCache c) where + map k (Compilation manifest resolutions compiler a) = Compilation manifest resolutions compiler (map2 k a) + +instance FsEncodable CompilerCache where + encodeFs = case _ of + Compilation (Manifest manifest) resolutions compiler next -> do + let + baseKey = "Compilation__" <> PackageName.print manifest.name <> "__" <> Version.print manifest.version <> "__" <> Version.print compiler <> "__" + hashKey = do + let resolutions' = foldlWithIndex (\name prev version -> formatPackageVersion name version <> prev) "" resolutions + unsafePerformEffect $ Sha256.hashString resolutions' + cacheKey = baseKey <> Sha256.print hashKey + + let + codec = CJ.named "FindAllCompilersResult" $ CJ.Record.object + { target: Version.codec + , result: CJ.Common.either compilerFailureCodec CJ.null + } + + Exists.mkExists $ Cache.AsJson cacheKey codec next diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ac64c8e65..ac9ffc398 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,11 +111,12 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do , " has no untracked or dirty files, it is safe to pull the latest." 
] pure true - Just files -> do - Log.debug $ Array.fold - [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - , NonEmptyArray.foldMap1 (append "\n - ") files - ] + Just _files -> do + -- This is a bit noisy, so commenting it out for now. + -- Log.debug $ Array.fold + -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." diff --git a/app/src/App/CLI/Purs.purs b/app/src/App/CLI/Purs.purs index 7e8d22c90..e5706e3f1 100644 --- a/app/src/App/CLI/Purs.purs +++ b/app/src/App/CLI/Purs.purs @@ -4,6 +4,7 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record @@ -13,6 +14,16 @@ import Node.ChildProcess.Types (Exit(..)) import Node.Library.Execa as Execa import Registry.Version as Version +-- | The minimum compiler version that supports 'purs graph' +minPursGraph :: Version +minPursGraph = unsafeFromRight (Version.parse "0.14.0") + +minPursuitPublish :: Version +minPursuitPublish = unsafeFromRight (Version.parse "0.14.7") + +minLanguageCSTParser :: Version +minLanguageCSTParser = unsafeFromRight (Version.parse "0.15.0") + -- | Call a specific version of the PureScript compiler callCompiler_ :: { version :: Maybe Version, command :: PursCommand, cwd :: Maybe FilePath } -> Aff Unit callCompiler_ = void <<< callCompiler @@ -23,6 +34,22 @@ data CompilerFailure | MissingCompiler derive instance Eq CompilerFailure +derive instance Ord CompilerFailure + +compilerFailureCodec :: CJ.Codec CompilerFailure +compilerFailureCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError CompilerFailure + decode json = except do + map CompilationError (CJ.decode (CJ.array compilerErrorCodec) json) + <|> map UnknownError (CJ.decode CJ.string json) + <|> map (const MissingCompiler) (CJ.decode CJ.null json) + + encode :: CompilerFailure -> JSON + encode = case _ of + CompilationError errors -> CJ.encode (CJ.array compilerErrorCodec) errors + UnknownError message -> CJ.encode CJ.string message + MissingCompiler -> CJ.encode CJ.null unit type CompilerError = { position :: SourcePosition diff --git a/app/src/App/Effect/Cache.purs b/app/src/App/Effect/Cache.purs index 15808ff9d..3ea63452a 100644 --- a/app/src/App/Effect/Cache.purs +++ b/app/src/App/Effect/Cache.purs @@ -169,7 +169,6 @@ handleMemoryFs env = case _ of case inFs of Nothing -> pure $ reply Nothing Just entry -> do - Log.debug $ "Fell back to on-disk entry for " <> memory putMemoryImpl env.ref unit (Key memory (Const entry)) pure $ reply $ Just $ unCache entry Just cached -> @@ -227,8 +226,7 @@ getMemoryImpl ref (Key id (Reply reply)) = do let (unCache :: CacheValue -> b) = unsafeCoerce cache <- Run.liftEffect $ Ref.read ref case Map.lookup id cache of - Nothing -> do - Log.debug $ "No cache entry found for " <> id <> " in memory." + Nothing -> pure $ reply Nothing Just cached -> do pure $ reply $ Just $ unCache cached @@ -237,7 +235,6 @@ putMemoryImpl :: forall x r a. 
CacheRef -> a -> MemoryEncoding Const a x -> Run putMemoryImpl ref next (Key id (Const value)) = do let (toCache :: x -> CacheValue) = unsafeCoerce Run.liftEffect $ Ref.modify_ (Map.insert id (toCache value)) ref - Log.debug $ "Wrote cache entry for " <> id <> " in memory." pure next deleteMemoryImpl :: forall x r a. CacheRef -> MemoryEncoding Ignore a x -> Run (LOG + EFFECT + r) a @@ -276,7 +273,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readFile path)) >>= case _ of Left _ -> do - Log.debug $ "No cache found for " <> id <> " at path " <> path pure $ reply Nothing Right buf -> do pure $ reply $ Just buf @@ -285,7 +281,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 path)) >>= case _ of Left _ -> do - Log.debug $ "No cache file found for " <> id <> " at path " <> path pure $ reply Nothing Right content -> case JSON.parse content of Left parseError -> do @@ -308,7 +303,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as a buffer: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " as a buffer at path " <> path pure next AsJson id codec (Const value) -> do @@ -318,7 +312,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as JSON: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " at path " <> path <> " as JSON." pure next deleteFsImpl :: forall a b r. FilePath -> FsEncoding Ignore a b -> Run (LOG + AFF + r) a diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..142149bc0 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,10 +8,12 @@ import Data.String as String import Registry.API.V1 (JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +23,20 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. 
+ data Db a - = InsertLog LogLine a + = InsertPackageJob InsertPackageJob a + | InsertMatrixJob InsertMatrixJob a + | InsertPackageSetJob InsertPackageSetJob a + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) + | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + | DeleteIncompleteJobs a derive instance Functor Db @@ -39,28 +48,51 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. +-- | Select all logs for a given job, filtered by loglevel. selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) +selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow + +-- | Insert a new package job into the database. +insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) Unit +insertPackageJob job = Run.lift _db (InsertPackageJob job unit) + +-- | Insert a new matrix job into the database. +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) Unit +insertMatrixJob job = Run.lift _db (InsertMatrixJob job unit) + +-- | Insert a new package set job into the database. +insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) Unit +insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job unit) + +-- | Start a job in the database. +startJob :: forall r. StartJob -> Run (DB + r) Unit +startJob job = Run.lift _db (StartJob job unit) --- | Select a job by package name from the database, failing if there is no --- | current job available for that package name. -runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job) -runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity) +-- | Select the next package job from the database. +selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails) +selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow + +-- | Select the next matrix job from the database. +selectNextMatrixJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) +selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow + +-- | Select the next package set job from the database. +selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +deleteIncompleteJobs :: forall r. Run (DB + r) Unit +deleteIncompleteJobs = Run.lift _db (DeleteIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +102,52 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log + InsertPackageJob job next -> do + Run.liftEffect $ SQLite.insertPackageJob env.db job pure next - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertMatrixJob job next -> do + Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure next - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + InsertPackageSetJob job next -> do + Run.liftEffect $ SQLite.insertPackageSetJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId - pure $ reply job + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job + pure next + + SelectJobInfo jobId reply -> do + result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId + pure $ reply result + + SelectNextPackageJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + DeleteIncompleteJobs next -> do + Run.liftEffect $ SQLite.deleteIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/GitHub.purs b/app/src/App/Effect/GitHub.purs index 584832255..914a3aa92 100644 --- a/app/src/App/Effect/GitHub.purs +++ b/app/src/App/Effect/GitHub.purs @@ -242,8 +242,8 @@ request octokit githubRequest@{ route: route@(GitHubRoute method _ _), codec } = -- auto-expire cache entries. We will be behind GitHub at most this amount per repo. -- -- TODO: This 'diff' check should be removed once we have conditional requests. 
- Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 4.0 -> do - Log.debug $ "Found cache entry but it was modified more than 4 hours ago, refetching " <> printedRoute + Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 23.0 -> do + Log.debug $ "Found cache entry but it was modified more than 23 hours ago, refetching " <> printedRoute result <- requestWithBackoff octokit githubRequest Cache.put _githubCache (Request route) (result <#> \resp -> { response: CJ.encode codec resp, modified: now, etag: Nothing }) pure result @@ -265,10 +265,8 @@ requestWithBackoff octokit githubRequest = do Log.debug $ "Making request to " <> route <> " with base URL " <> githubApiUrl result <- Run.liftAff do let - retryOptions = - { timeout: defaultRetry.timeout - , retryOnCancel: defaultRetry.retryOnCancel - , retryOnFailure: \attempt err -> case err of + retryOptions = defaultRetry + { retryOnFailure = \attempt err -> case err of UnexpectedError _ -> false DecodeError _ -> false -- https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#exceeding-the-rate-limit diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..a1cb72c0a 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -134,5 +134,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/Effect/PackageSets.purs b/app/src/App/Effect/PackageSets.purs index 5a250ba22..ccd78e1c2 100644 --- a/app/src/App/Effect/PackageSets.purs +++ b/app/src/App/Effect/PackageSets.purs @@ -428,7 +428,7 @@ validatePackageSet (PackageSet set) = do -- We can now attempt to produce a self-contained manifest index from the -- collected manifests. If this fails then the package set is not -- self-contained. - Tuple unsatisfied _ = ManifestIndex.maximalIndex (Set.fromFoldable success) + Tuple unsatisfied _ = ManifestIndex.maximalIndex ManifestIndex.IgnoreRanges (Set.fromFoldable success) -- Otherwise, we can check if we were able to produce an index from the -- package set alone, without errors. 
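The `ManifestIndex` hunks above and below thread an explicit range-checking mode through index construction: package sets pin exact versions, so their validation builds an index with `IgnoreRanges`, while writes to the registry's own manifest index use `ConsiderRanges` and keep enforcing bounds. A minimal sketch of the distinction, assuming the registry-lib module layout and the signatures implied by these hunks; `isSelfContained` and `insertChecked` are hypothetical helpers, not code from this patch:

```purescript
-- A hedged sketch, not part of this diff: the two range-checking modes as
-- used by the surrounding hunks.
module ManifestIndexModes where

import Prelude

import Data.Map as Map
import Data.Set (Set)
import Data.Tuple (Tuple(..))
import Registry.Manifest (Manifest)
import Registry.ManifestIndex as ManifestIndex

-- Package sets pin exact versions, so an index over their manifests is built
-- while ignoring dependency bounds; the set is self-contained when nothing is
-- left unsatisfied.
isSelfContained :: Set Manifest -> Boolean
isSelfContained manifests =
  let Tuple unsatisfied _ = ManifestIndex.maximalIndex ManifestIndex.IgnoreRanges manifests
  in Map.isEmpty unsatisfied

-- The registry's on-disk index keeps enforcing bounds, so insertion fails when
-- a manifest's ranges cannot be satisfied by packages already in the index.
insertChecked manifest index =
  ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index
```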
diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index cdd00eb1d..bd406ff25 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -252,7 +252,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Writing manifest for " <> formatted <> ":\n" <> printJson Manifest.codec manifest index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left error -> Except.throw $ Array.fold [ "Can't insert " <> formatted <> " into manifest index because it has unsatisfied dependencies:" @@ -275,7 +275,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Deleting manifest for " <> formatted index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left error -> Except.throw $ Array.fold [ "Can't delete " <> formatted <> " from manifest index because it would produce unsatisfied dependencies:" @@ -359,7 +359,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Just metadata -> do Log.debug $ "Successfully read metadata for " <> printedName <> " from path " <> path - Log.debug $ "Setting metadata cache to singleton entry (as cache was previosuly empty)." + Log.debug $ "Setting metadata cache to singleton entry (as cache was previously empty)." Cache.put _registryCache AllMetadata (Map.singleton name metadata) pure $ Just metadata @@ -836,8 +836,9 @@ readManifestIndexFromDisk root = do entries <- map partitionEithers $ for packages.success (ManifestIndex.readEntryFile root) case entries.fail of - [] -> case ManifestIndex.fromSet $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of + [] -> case ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of Left errors -> do + Log.debug $ "Could not read a valid manifest index from entry files: " <> Array.foldMap (Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) <<< NonEmptyArray.toArray) entries.success Except.throw $ append "Unable to read manifest index (some packages are not satisfiable): " $ Array.foldMap (append "\n - ") do Tuple name versions <- Map.toUnfoldable errors Tuple version dependency <- Map.toUnfoldable versions @@ -878,10 +879,10 @@ readAllMetadataFromDisk metadataDir = do entries <- Run.liftAff $ map partitionEithers $ for packages.success \name -> do result <- readJsonFile Metadata.codec (Path.concat [ metadataDir, PackageName.print name <> ".json" ]) - pure $ map (Tuple name) result + pure $ bimap (Tuple name) (Tuple name) result unless (Array.null entries.fail) do - Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (append "\n - ") entries.fail + Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (\(Tuple name err) -> "\n - " <> PackageName.print name <> ": " <> err) entries.fail Log.debug "Successfully read metadata entries." 
pure $ Map.fromFoldable entries.success diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index a9479d3f5..828759792 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -6,6 +6,8 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.JSDate as JSDate +import Data.String as String +import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff import Effect.Exception as Exception import Effect.Now as Now @@ -20,6 +22,7 @@ import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Location as Location @@ -28,8 +31,15 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +-- | Packages can be published via the legacy importer or a user via the API. We +-- | determine some information differently in these cases, such as the time the +-- | package was published. +data ImportType = Old | Recent + +derive instance Eq ImportType + -- | An effect for fetching package sources -data Source a = Fetch PackageSource FilePath Location String (Either String FetchedSource -> a) +data Source a = Fetch FilePath Location String (Either FetchError FetchedSource -> a) derive instance Functor Source @@ -40,27 +50,42 @@ _source = Proxy type FetchedSource = { path :: FilePath, published :: DateTime } +data FetchError + = GitHubOnly + | NoSubdir + | InaccessibleRepo Octokit.Address + | NoToplevelDir + | Fatal String + +printFetchError :: FetchError -> String +printFetchError = case _ of + GitHubOnly -> "Packages are only allowed to come from GitHub for now. See issue #15." + NoSubdir -> "Monorepos and the `subdir` key are not supported yet. See issue #16." + InaccessibleRepo { owner, repo } -> "Repository located at https://github.com/" <> owner <> "/" <> repo <> ".git is inaccessible or does not exist." + NoToplevelDir -> "Downloaded tarball has no top-level directory." + Fatal err -> "Unrecoverable error. " <> err + -- | Fetch the provided location to the provided destination path. -fetch :: forall r. PackageSource -> FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource -fetch source destination location ref = Except.rethrow =<< Run.lift _source (Fetch source destination location ref identity) +fetch :: forall r. FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource +fetch destination location ref = (Except.rethrow <<< lmap printFetchError) =<< Run.lift _source (Fetch destination location ref identity) -- | Run the SOURCE effect given a handler. interpret :: forall r a. (Source ~> Run r) -> Run (SOURCE + r) a -> Run r a interpret handler = Run.interpret (Run.on _source handler Run.send) -- | Handle the SOURCE effect by downloading package source to the file system. -handle :: forall r a. Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a -handle = case _ of - Fetch source destination location ref reply -> map (map reply) Except.runExcept do +handle :: forall r a. ImportType -> Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a +handle importType = case _ of + Fetch destination location ref reply -> map (map reply) Except.runExcept do Log.info $ "Fetching " <> printJson Location.codec location case location of Git _ -> do -- TODO: Support non-GitHub packages. 
Remember subdir when doing so. (See #15) - Except.throw "Packages are only allowed to come from GitHub for now. See #15" + Except.throw GitHubOnly GitHub { owner, repo, subdir } -> do -- TODO: Support subdir. In the meantime, we verify subdir is not present. (See #16) - when (isJust subdir) $ Except.throw "`subdir` is not supported for now. See #16" + when (isJust subdir) $ Except.throw NoSubdir case pursPublishMethod of -- This needs to be removed so that we can support non-GitHub packages (#15) @@ -73,41 +98,64 @@ handle = case _ of Log.debug $ "Using legacy Git clone to fetch package source at tag: " <> show { owner, repo, ref } let - repoDir = Path.concat [ destination, repo ] - - clonePackageAtTag = do - let url = Array.fold [ "https://github.com/", owner, "/", repo ] - let args = [ "clone", url, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] - withRetryOnTimeout (Git.gitCLI args Nothing) >>= case _ of - Cancelled -> Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> url <> " " <> ref - Failed err -> Aff.throwError $ Aff.error err - Succeeded _ -> pure unit + repoDir = Path.concat [ destination, repo <> "-" <> ref ] + + -- If a git clone is cancelled by the timeout, but had partially-cloned, then it will + -- leave behind files that prevent a retry. + retryOpts = defaultRetry + { cleanupOnCancel = FS.Extra.remove repoDir + , timeout = Milliseconds 15_000.0 + } + + cloneUrl = + Array.fold [ "https://github.com/", owner, "/", repo ] + + cloneArgs = + [ "clone", cloneUrl, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] + + clonePackageAtTag = + withRetry retryOpts (Git.gitCLI cloneArgs Nothing) >>= case _ of + Cancelled -> + Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> cloneUrl <> " " <> ref + Failed err -> + Aff.throwError $ Aff.error err + Succeeded _ -> + pure unit Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of + Right _ -> Log.debug $ "Cloned package source to " <> repoDir Left error -> do + Log.warn $ "Git clone command failed:\n " <> String.joinWith " " (Array.cons "git" cloneArgs) Log.error $ "Failed to clone git tag: " <> Aff.message error - Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref - Right _ -> Log.debug $ "Cloned package source to " <> repoDir + + -- We'll receive this message if we try to clone a repo which doesn't + -- exist, which is interpreted as an attempt to fetch a private repo. + let missingRepoErr = "fatal: could not read Username for 'https://github.com': terminal prompts disabled" + + if String.contains (String.Pattern missingRepoErr) (Aff.message error) then + Except.throw $ InaccessibleRepo { owner, repo } + else + Except.throw $ Fatal $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." 
let - getRefTime = case source of - LegacyPackage -> do - timestamp <- Except.rethrow =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) + getRefTime = case importType of + Old -> do + timestamp <- (Except.rethrow <<< lmap Fatal) =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) jsDate <- Run.liftEffect $ JSDate.parse timestamp dateTime <- case JSDate.toDateTime jsDate of - Nothing -> Except.throw $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate + Nothing -> Except.throw $ Fatal $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate Just parsed -> pure parsed pure dateTime - CurrentPackage -> + Recent -> Run.liftEffect Now.nowDateTime -- Cloning will result in the `repo` name as the directory name publishedTime <- Except.runExcept getRefTime >>= case _ of Left error -> do - Log.error $ "Failed to get published time: " <> error - Except.throw $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." + Log.error $ "Failed to get published time. " <> printFetchError error + Except.throw $ Fatal $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." Right value -> pure value pure { path: repoDir, published: publishedTime } @@ -122,12 +170,12 @@ handle = case _ of commit <- GitHub.getRefCommit { owner, repo } (RawVersion ref) >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at ref " <> ref <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref + Except.throw $ Fatal $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref Right result -> pure result GitHub.getCommitDate { owner, repo } commit >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at commit " <> commit <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref + Except.throw $ Fatal $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref Right a -> pure a let tarballName = ref <> ".tar.gz" @@ -139,16 +187,16 @@ handle = case _ of Run.liftAff $ Fetch.withRetryRequest archiveUrl {} case response of - Cancelled -> Except.throw $ "Could not download " <> archiveUrl + Cancelled -> Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.FetchError error) -> do Log.error $ "Failed to download " <> archiveUrl <> " because of an HTTP error: " <> Exception.message error - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do arrayBuffer <- Run.liftAff arrayBufferAff buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) Log.error $ "Failed to download " <> archiveUrl <> " because of a non-200 status code (" <> show status <> ") with body " <> bodyString - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Succeeded { 
arrayBuffer: arrayBufferAff } -> do arrayBuffer <- Run.liftAff arrayBufferAff Log.debug $ "Successfully downloaded " <> archiveUrl <> " into a buffer." @@ -156,14 +204,14 @@ handle = case _ of Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of Left error -> do Log.error $ "Downloaded " <> archiveUrl <> " but failed to write it to the file at path " <> absoluteTarballPath <> ":\n" <> Aff.message error - Except.throw $ "Could not download " <> archiveUrl <> " due to an internal error." + Except.throw $ Fatal $ "Could not download " <> archiveUrl <> " due to an internal error." Right _ -> Log.debug $ "Tarball downloaded to " <> absoluteTarballPath Log.debug "Verifying tarball..." Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of Nothing -> - Except.throw "Downloaded tarball from GitHub has no top-level directory." + Except.throw NoToplevelDir Just path -> do Log.debug "Extracting the tarball..." Tar.extract { cwd: destination, archive: tarballName } diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 2c02604c4..56422ab64 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -58,7 +58,7 @@ main = launchAff_ $ do Right packageOperation -> case packageOperation of Publish payload -> - API.publish CurrentPackage payload + API.publish Nothing payload Authenticated payload -> do -- If we receive an authenticated operation via GitHub, then we -- re-sign it with pacchettibotti credentials if and only if the @@ -98,10 +98,11 @@ main = launchAff_ $ do # Registry.interpret (Registry.handle registryEnv) # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret Source.handle + # Source.interpret (Source.handle Source.Recent) # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) -- Caching & logging # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) # Log.interpret (Log.handleTerminal Verbose) diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 7788b16c2..7197a6001 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -11,7 +11,6 @@ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Either as Either import Data.Exists as Exists -import Data.FunctorWithIndex (mapWithIndex) import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Ord.Max (Max(..)) @@ -38,7 +37,7 @@ import Registry.App.Legacy.LenientRange as LenientRange import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (LegacyPackageSet(..), LegacyPackageSetEntry, LegacyPackageSetUnion, RawPackageName(..), RawVersion(..), RawVersionRange(..), legacyPackageSetCodec, legacyPackageSetUnionCodec, rawPackageNameMapCodec, rawVersionCodec, rawVersionRangeCodec) -import Registry.Foreign.Octokit (Address, GitHubError) +import Registry.Foreign.Octokit (Address, GitHubError(..)) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp import Registry.License as 
License @@ -61,7 +60,8 @@ type LegacyManifest = } toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location { license, description, dependencies } = do +toManifest name version location legacy = do + let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing @@ -140,21 +140,13 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr Left bowerError, Left _ -> Left bowerError Right bowerDeps, Left _ -> Right bowerDeps Left _, Right spagoDeps -> Right spagoDeps - Right bowerDeps, Right spagoDeps -> Right do - bowerDeps # mapWithIndex \package range -> - case Map.lookup package spagoDeps of - Nothing -> range - Just spagoRange -> Range.union range spagoRange + Right bowerDeps, Right spagoDeps -> Right $ Map.unionWith Range.union bowerDeps spagoDeps unionPackageSets = case maybePackageSetDeps, unionManifests of Nothing, Left manifestError -> Left manifestError Nothing, Right manifestDeps -> Right manifestDeps Just packageSetDeps, Left _ -> Right packageSetDeps - Just packageSetDeps, Right manifestDeps -> Right do - packageSetDeps # mapWithIndex \package range -> - case Map.lookup package manifestDeps of - Nothing -> range - Just manifestRange -> Range.union range manifestRange + Just packageSetDeps, Right manifestDeps -> Right $ Map.unionWith Range.union manifestDeps packageSetDeps Run.Except.rethrowAt _legacyManifestError unionPackageSets @@ -173,6 +165,44 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr pure { license, dependencies, description } +-- | Some legacy manifests must be patched to be usable. +patchLegacyManifest :: PackageName -> Version -> LegacyManifest -> LegacyManifest +patchLegacyManifest name version legacy = do + let bolson = unsafeFromRight (PackageName.parse "bolson") + let hyrule = unsafeFromRight (PackageName.parse "hyrule") + + let unsafeVersion = unsafeFromRight <<< Version.parse + let unsafeRange a b = unsafeFromJust (Range.mk (unsafeVersion a) (unsafeVersion b)) + let fixRange pkg range = Map.update (\_ -> Just range) pkg + + -- hyrule v2.2.0 removes a module that breaks all versions of bolson + -- prior to the versions below + let earlyHyruleFixedRange = unsafeRange "1.6.4" "2.2.0" + let earlyFixHyrule = fixRange hyrule earlyHyruleFixedRange + + -- hyrule v2.4.0 removes a module that breaks all versions of bolson, deku, + -- and rito prior to the versions below + let hyruleFixedRange = unsafeRange "2.0.0" "2.4.0" + let fixHyrule = fixRange hyrule hyruleFixedRange + + -- bolson v0.3.1 changes the type of a function that breaks deku until 0.9.21 + let bolsonFixedRange = unsafeRange "0.1.0" "0.3.2" + let fixBolson = fixRange bolson bolsonFixedRange + + case PackageName.print name of + "bolson" + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.4.0" -> legacy { dependencies = fixHyrule legacy.dependencies } + "deku" + | version < unsafeVersion "0.7.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.9.21" -> legacy { dependencies = fixBolson (fixHyrule legacy.dependencies) } + | version < unsafeVersion "0.9.25" -> legacy { dependencies = fixHyrule legacy.dependencies } + "rito" + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.3.5" -> legacy { 
dependencies = fixHyrule legacy.dependencies }
+    _ ->
+      legacy
+
 _legacyManifestError :: Proxy "legacyManifestError"
 _legacyManifestError = Proxy
@@ -224,16 +254,22 @@ fetchLegacyManifestFiles
   :: forall r
    . Address
   -> RawVersion
-  -> Run (GITHUB + LOG + AFF + EFFECT + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson))
+  -> Run (GITHUB + LOG + AFF + EFFECT + EXCEPT String + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson))
 fetchLegacyManifestFiles address ref = do
   eitherBower <- fetchBowerfile address ref
-  void $ flip ltraverse eitherBower \error ->
-    Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error
+  void $ flip ltraverse eitherBower case _ of
+    APIError { statusCode } | statusCode == 401 ->
+      Except.throw "Permission error on token used to fetch manifests!"
+    error ->
+      Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error
   eitherSpago <- fetchSpagoDhallJson address ref
-  void $ flip ltraverse eitherSpago \error ->
-    Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error
+  void $ flip ltraverse eitherSpago case _ of
+    APIError { statusCode } | statusCode == 401 ->
+      Except.throw "Permission error on token used to fetch manifests!"
+    error ->
+      Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error
   pure $ case eitherBower, eitherSpago of
-    Left _, Left _ -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available." }
+    Left errL, Left errR -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available: " <> Octokit.printGitHubError errL <> ", " <> Octokit.printGitHubError errR }
     Right bower, Left _ -> Right $ This bower
     Left _, Right spago -> Right $ That spago
     Right bower, Right spago -> Right $ Both bower spago
diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs
new file mode 100644
index 000000000..19bfac1eb
--- /dev/null
+++ b/app/src/App/Main.purs
@@ -0,0 +1,88 @@
+module Registry.App.Main where
+
+import Registry.App.Prelude hiding ((/))
+
+import Data.DateTime (diff)
+import Data.Time.Duration (Milliseconds(..), Seconds(..))
+import Effect.Aff as Aff
+import Effect.Class.Console as Console
+import Fetch.Retry as Fetch.Retry
+import Node.Process as Process
+import Registry.App.Server.Env (ServerEnv, createServerEnv)
+import Registry.App.Server.JobExecutor as JobExecutor
+import Registry.App.Server.Router as Router
+
+main :: Effect Unit
+main = do
+  createServerEnv # Aff.runAff_ case _ of
+    Left error -> do
+      Console.log $ "Failed to start server: " <> Aff.message error
+      Process.exit' 1
+    Right env -> do
+      Aff.launchAff_ $ healthcheck env
+      Aff.launchAff_ $ jobExecutor env
+      Router.runRouter env
+  where
+  healthcheck :: ServerEnv -> Aff Unit
+  healthcheck env = loop limit
+    where
+    limit = 10
+    oneMinute = Aff.Milliseconds (1000.0 * 60.0)
+    fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0)
+
+    loop n = do
+      Fetch.Retry.withRetryRequest env.vars.resourceEnv.healthchecksUrl {} >>= case _ of
+        Succeeded { status } | status == 200 -> do
+          Aff.delay fiveMinutes
+          loop n
+
+        Cancelled | n >= 0 -> do
+          Console.warn $ "Healthchecks cancelled, will retry..."
+          Aff.delay oneMinute
+          loop (n - 1)
+
+        Failed error | n >= 0 -> do
+          Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error
+          Aff.delay oneMinute
+          loop (n - 1)
+
+        Succeeded { status } | status /= 200, n >= 0 -> do
+          Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status
+          Aff.delay oneMinute
+          loop (n - 1)
+
+        Cancelled -> do
+          Console.error
+            "Healthchecks cancelled and failure limit reached, will not retry."
+
+        Failed error -> do
+          Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error
+
+        Succeeded _ -> do
+          Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry."
+
+  jobExecutor :: ServerEnv -> Aff Unit
+  jobExecutor env = do
+    loop initialRestartDelay
+    where
+    initialRestartDelay = Milliseconds 100.0
+
+    loop restartDelay = do
+      start <- nowUTC
+      result <- JobExecutor.runJobExecutor env
+      end <- nowUTC
+
+      Console.error case result of
+        Left error -> "Job executor failed: " <> Aff.message error
+        Right _ -> "Job executor exited without an error."
+
+      -- This is a heuristic: if the executor keeps crashing immediately, we
+      -- restart with an exponentially increasing delay, but once the executor
+      -- had a run longer than a minute, we start over with a small delay.
+      let
+        nextRestartDelay
+          | end `diff` start > Seconds 60.0 = initialRestartDelay
+          | otherwise = restartDelay <> restartDelay
+
+      Aff.delay nextRestartDelay
+      loop nextRestartDelay
diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs
index 311a15aa5..5e586ebae 100644
--- a/app/src/App/Prelude.purs
+++ b/app/src/App/Prelude.purs
@@ -1,6 +1,5 @@
 module Registry.App.Prelude
   ( LogVerbosity(..)
-  , PackageSource(..)
   , PursPublishMethod(..)
   , Retry
   , RetryResult(..)
@@ -23,7 +22,6 @@ module Registry.App.Prelude
   , parseYaml
   , partitionEithers
   , printJson
-  , printPackageSource
   , pursPublishMethod
   , readJsonFile
   , readYamlFile
@@ -62,7 +60,7 @@ import Data.List (List) as Extra
 import Data.Map (Map) as Extra
 import Data.Map as Map
 import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe
-import Data.Newtype (class Newtype, un) as Extra
+import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra
 import Data.Newtype as Newtype
 import Data.Nullable (Nullable, toMaybe, toNullable) as Extra
 import Data.Set (Set) as Extra
@@ -173,7 +171,9 @@ withRetryOnTimeout = withRetry defaultRetry
 
 type Retry err =
   { timeout :: Aff.Milliseconds
+  , cleanupOnCancel :: Extra.Aff Unit
   , retryOnCancel :: Int -> Boolean
+  , cleanupOnFailure :: err -> Extra.Aff Unit
   , retryOnFailure :: Int -> err -> Boolean
   }
 
@@ -182,7 +182,9 @@ type Retry err =
 defaultRetry :: forall err. Retry err
 defaultRetry =
   { timeout: Aff.Milliseconds 5000.0
+  , cleanupOnCancel: pure unit
   , retryOnCancel: \attempt -> attempt <= 3
+  , cleanupOnFailure: \_ -> pure unit
   , retryOnFailure: \_ _ -> false
   }
 
@@ -196,7 +198,7 @@ derive instance (Eq err, Eq a) => Eq (RetryResult err a)
 -- | Attempt an effectful computation that can fail by specifying how to retry
 -- | the request and whether it should time out.
 withRetry :: forall err a. Retry err -> Extra.Aff (Either.Either err a) -> Extra.Aff (RetryResult err a)
-withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } action = do
+withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure, cleanupOnCancel, cleanupOnFailure } action = do
   let
     runAction :: Extra.Aff (Either.Either err a) -> Int -> Extra.Aff (RetryResult err a)
     runAction action' ms = do
@@ -217,14 +219,18 @@ withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } a
       Cancelled ->
         if retryOnCancel attempt then do
           let newTimeout = Int.floor timeout `Int.pow` (attempt + 1)
+          cleanupOnCancel
           retry (attempt + 1) =<< runAction action newTimeout
-        else
+        else do
+          cleanupOnCancel
           pure Cancelled
       Failed err ->
         if retryOnFailure attempt err then do
           let newTimeout = Int.floor timeout `Int.pow` (attempt + 1)
+          cleanupOnFailure err
           retry (attempt + 1) =<< runAction action newTimeout
-        else
+        else do
+          cleanupOnFailure err
           pure (Failed err)
       Succeeded result ->
         pure (Succeeded result)
@@ -255,15 +261,3 @@ data PursPublishMethod = LegacyPursPublish | PursPublish
 
 -- | The current purs publish method
 pursPublishMethod :: PursPublishMethod
 pursPublishMethod = LegacyPursPublish
-
--- | Operations can be exercised for old, pre-registry packages, or for packages
--- | which are on the 0.15 compiler series. If a true legacy package is uploaded
--- | then we do not require compilation to succeed and we don't publish docs.
-data PackageSource = LegacyPackage | CurrentPackage
-
-derive instance Eq PackageSource
-
-printPackageSource :: PackageSource -> String
-printPackageSource = case _ of
-  LegacyPackage -> "legacy"
-  CurrentPackage -> "current"
diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js
index 8158695fc..97521d202 100644
--- a/app/src/App/SQLite.js
+++ b/app/src/App/SQLite.js
@@ -1,5 +1,11 @@
 import Database from "better-sqlite3";
 
+const JOB_INFO_TABLE = 'job_info';
+const LOGS_TABLE = 'logs';
+const PACKAGE_JOBS_TABLE = 'package_jobs';
+const MATRIX_JOBS_TABLE = 'matrix_jobs';
+const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs';
+
 export const connectImpl = (path, logger) => {
   logger("Connecting to database at " + path);
   let db = new Database(path, {
@@ -11,49 +17,152 @@ export const connectImpl = (path, logger) => {
   return db;
 };
 
-export const insertLogImpl = (db, logLine) => {
-  db.prepare(
-    "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)"
-  ).run(logLine);
+export const selectJobInfoImpl = (db, jobId) => {
+  const stmt = db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+}
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include a 'jobId' and 'createdAt'
+// field, though other fields will be required depending on the job.
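+//
+// As an illustrative example (hypothetical values, not taken from a real run):
+// a package job passes the columns ['jobId', 'jobType', 'payload'], so the job
+// object must be exactly
+//
+//   { jobId: "<uuid>", createdAt: "<iso8601>", jobType: "publish", payload: "<json>" }
+//
+// and any missing or extra field causes the insertion to throw.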
+const _insertJob = (db, table, columns, job) => {
+  const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns]));
+  const missingFields = requiredFields.filter(field => !(field in job));
+  const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field));
+
+  if (missingFields.length > 0) {
+    throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`);
+  }
+
+  if (extraFields.length > 0) {
+    throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`);
+  }
+
+  const insertInfo = db.prepare(`
+    INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success)
+    VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success)
+  `);
+
+  const insertJob = db.prepare(`
+    INSERT INTO ${table} (${columns.join(', ')})
+    VALUES (${columns.map(col => `@${col}`).join(', ')})
+  `);
+
+  const insert = db.transaction((job) => {
+    insertInfo.run({
+      jobId: job.jobId,
+      createdAt: job.createdAt,
+      startedAt: null,
+      finishedAt: null,
+      success: 0
+    });
+    insertJob.run(job);
+  });
+
+  return insert(job);
+};
+
+export const insertPackageJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'jobType', 'payload' ];
+  return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job);
 };
 
-export const selectLogsByJobImpl = (db, jobId, logLevel) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM logs WHERE jobId = ? AND level >= ? ORDER BY timestamp ASC"
-    )
-    .all(jobId, logLevel);
-  return row;
+export const insertMatrixJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'compilerVersion', 'payload' ];
+  return _insertJob(db, MATRIX_JOBS_TABLE, columns, job);
 };
 
-export const createJobImpl = (db, job) => {
-  db.prepare(
-    "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)"
-  ).run(job);
+export const insertPackageSetJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'payload' ];
+  return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job);
 };
 
-export const finishJobImpl = (db, result) => {
-  db.prepare(
-    "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId"
-  ).run(result);
+export const selectNextPackageJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt DESC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const selectJobImpl = (db, jobId) => {
-  const row = db
-    .prepare("SELECT * FROM jobs WHERE jobId = ? LIMIT 1")
-    .get(jobId);
-  return row;
+export const selectNextMatrixJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${MATRIX_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt DESC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const runningJobForPackageImpl = (db, packageName) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ?
ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectNextPackageSetJobImpl = (db) => { + const stmt = db.prepare(` + SELECT job.*, info.createdAt, info.startedAt + FROM ${PACKAGE_SET_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.finishedAt IS NULL + ORDER BY info.createdAt DESC + LIMIT 1 + `); + return stmt.get(); }; +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); + const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`); + return stmt.run(); +}; + +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); +}; + +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? + `; + + const params = [jobId, logLevel]; + + if (since !== null) { + query += ' AND timestamp >= ?'; + params.push(since); + } + + query += ' ORDER BY timestamp ASC'; + + const stmt = db.prepare(query); + return stmt.all(...params); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..208befb9a 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,413 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + +-- TOMORROW: +-- +-- * Add the job executor to server startup +-- * Move the various job details to the API.V1 module since it'll be returned by the UI +-- * Update the router to just create a job when received, and on lookup to return relevant details from the db +-- * Update the router to have an endpoint for creating a package set job and compiler matrix job using the +-- same authentication requirements as for GitHub today. +-- * Move the compiler matrix out of publish into its own functionality so it can be called. We want to +-- be able to spawn a matrix job at any time for a compiler/package version pair, but need a helper to +-- do the whole toposort thing. +-- * Update job execution to actually call the relevant publish/unpublish/transfer/package set API fn +-- +-- LATER +-- * Update tests that refer to the DB effect +-- * Adjust the integration test(s) to verify we're getting enforced concurrency control +-- * Update the GitHub issue module so it only submits a request to the registry and returns +-- a job id, rather than actually running the fns directly. Poll for a result still and +-- comment when the job completes. +-- +-- FOLLOWUP +-- * Punt on the squash commit until later. 
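+--
+-- As a rough usage sketch (illustrative only; the real call sites live behind
+-- Registry.App.Effect.Db), a caller holding a connection would do roughly:
+--
+--   db <- SQLite.connect { database: "/path/to/registry.sqlite3", logger: mempty }
+--   SQLite.insertPackageJob db { jobId, payload }
+--   nextJob <- SQLite.selectNextPackageJob db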
module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob - , SQLite + ( SQLite + , ConnectOptions , connect - , createJob - , deleteIncompleteJobs + , JobInfo + , selectJobInfo + , InsertPackageJob + , insertPackageJob + , InsertMatrixJob + , insertMatrixJob + , InsertPackageSetJob + , insertPackageSetJob + , FinishJob , finishJob - , insertLog - , runningJobForPackage - , selectJob + , StartJob + , startJob + , deleteIncompleteJobs + , insertLogLine , selectLogsByJob + , PackageJobDetails + , selectNextPackageJob + , MatrixJobDetails + , selectNextMatrixJob + , PackageSetJobDetails + , selectNextPackageSetJob ) where import Registry.App.Prelude -import Data.Array as Array +import Codec.JSON.DecodeError as JSON.DecodeError import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Nullable as Nullable +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Registry.API.V1 (JobId(..), LogLevel, LogLine) import Registry.API.V1 as API.V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType +import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit - -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. 
+type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + isSuccess <- case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: isSuccess + } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +selectJobInfo db (JobId jobId) = do + maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId + pure $ traverse jobInfoFromJSRep maybeJobInfo + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: if success then 1 else 0 + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSStartJob = + { jobId :: String + , startedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = 
Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit + +deleteIncompleteJobs :: SQLite -> Effect Unit +deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl -type NewJob = +type InsertPackageJob = { jobId :: JobId - , jobType :: JobType - , createdAt :: DateTime - , packageName :: PackageName - , ref :: String + , payload :: PackageOperation } -type JSNewJob = +type JSInsertPackageJob = { jobId :: String , jobType :: String - , createdAt :: String - , packageName :: String - , ref :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId - , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt - , packageName: PackageName.print packageName - , ref +insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , jobType: JobType.print case payload of + Operation.Publish _ -> JobType.PublishJob + Operation.Authenticated { payload: Operation.Unpublish _ } -> JobType.UnpublishJob + Operation.Authenticated { payload: Operation.Transfer _ } -> JobType.TransferJob + , payload: stringifyJson Operation.packageOperationCodec payload } -type JobResult = +foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
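+-- | The 'jobType' column is not supplied by callers; it is derived from the
+-- | operation payload when the job is inserted.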
+insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit +insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertPackageJobToJSRep + +type InsertMatrixJob = { jobId :: JobId - , finishedAt :: DateTime - , success :: Boolean + , compilerVersion :: Version + , payload :: Map PackageName Version } -type JSJobResult = +type JSInsertMatrixJob = { jobId :: String - , finishedAt :: String - , success :: Int + , compilerVersion :: String + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep { jobId, compilerVersion, payload } = + { jobId: un JobId jobId + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload + } + +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit +insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep + +type InsertPackageSetJob = + { jobId :: JobId + , payload :: PackageSetOperation } -type Job = +type JSInsertPackageSetJob = + { jobId :: String + , payload :: String + } + +insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , payload: stringifyJson Operation.packageSetOperationCodec payload + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit +insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep + +type PackageJobDetails = { jobId :: JobId - , jobType :: JobType , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation , createdAt :: DateTime - , finishedAt :: Maybe DateTime - , success :: Boolean + , startedAt :: Maybe DateTime } -type JSJob = +type JSPackageJobDetails = { jobId :: String - , jobType :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String , createdAt :: String - , finishedAt :: Nullable String - , success :: Int + , startedAt :: Nullable String } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + version <- Version.parse packageVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat 
Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { jobId: JobId jobId + , packageName: name + , packageVersion: version + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) + +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails + +type MatrixJobDetails = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSMatrixJobDetails = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +type PackageSetJobDetails = + { jobId :: JobId + , payload :: PackageSetOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSPackageSetJobDetails = + { jobId :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + pure + { jobId: JobId jobId + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- 
map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp + } -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
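+-- | An optional 'since' timestamp restricts results to logs recorded at or
+-- | after that time.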
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + (Nullable.toNullable timestamp) + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index 659b4ad8a..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,345 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Data.Codec.JSON as CJ -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Newtype (unwrap) -import Data.String as String -import Data.UUID.Random as UUID -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except - -newJobId :: forall m. 
MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish CurrentPackage publish - - Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." - - Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String 
- , resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest env.vars.resourceEnv.healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." 
- Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port: 8080 - , onStarted - } - { route: V1.routes - , router: runServer env router - } - pure unit - where - onStarted :: Effect Unit - onStarted = do - Console.log $ String.joinWith "\n" - [ " ┌───────────────────────────────────────────┐" - , " │ Server now up on port 8080 │" - , " │ │" - , " │ To test, run: │" - , " │ > curl -v localhost:8080/api/v1/jobs │" - , " └───────────────────────────────────────────┘" - ] - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret Source.handle - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs 
b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..1f6fdc489 --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 +1,188 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Comment (COMMENT) +import Registry.App.Effect.Comment as Comment +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: 
vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.deleteIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Comment.interpret Comment.handleLog + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # 
Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs new file mode 100644 index 000000000..125a9a7a3 --- /dev/null +++ b/app/src/App/Server/JobExecutor.purs @@ -0,0 +1,91 @@ +module Registry.App.Server.JobExecutor where + +import Registry.App.Prelude hiding ((/)) + +import Control.Parallel as Parallel +import Data.DateTime (DateTime) +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Registry.App.API as API +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.Operation as Operation +import Run (Run) +import Run.Except (EXCEPT) + +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = runEffects env do + Db.deleteIncompleteJobs + loop + where + loop = do + mJob <- findNextAvailableJob + case mJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 100.0) + loop + + Just job -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + Db.startJob { jobId, startedAt: now } + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + jobResult <- liftAff do + let execute = Just <$> (runEffects env $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> unwrap jobId <> " timed out." + pure false + + Just (Left err) -> do + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err + pure false + + Just (Right _) -> do + Log.info $ "Job " <> unwrap jobId <> " succeeded." 
+          pure true
+
+      nowUTC >>= \finishedAt -> Db.finishJob { jobId, finishedAt, success }
+      loop
+
+executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit
+executeJob _ = case _ of
+  PackageJob { payload: Operation.Publish p } ->
+    API.publish Nothing p
+  PackageJob { payload: Operation.Authenticated auth } ->
+    API.authenticated auth
+
+  MatrixJob _details ->
+    pure unit -- UNIMPLEMENTED
+  PackageSetJob _details ->
+    pure unit -- UNIMPLEMENTED
diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs
new file mode 100644
index 000000000..350dcfb86
--- /dev/null
+++ b/app/src/App/Server/Router.purs
@@ -0,0 +1,119 @@
+module Registry.App.Server.Router where
+
+import Registry.App.Prelude hiding ((/))
+
+import Control.Monad.Cont (ContT)
+import Data.Codec.JSON as CJ
+import Data.String as String
+import Data.UUID.Random as UUID
+import Effect.Aff as Aff
+import Effect.Class.Console as Console
+import HTTPurple (Method(..), Request, Response)
+import HTTPurple as HTTPurple
+import HTTPurple.Status as Status
+import Registry.API.V1 (JobId(..), LogLevel(..), Route(..))
+import Registry.API.V1 as V1
+import Registry.App.Effect.Db as Db
+import Registry.App.Effect.Log as Log
+import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects)
+import Registry.Operation (PackageOperation)
+import Registry.Operation as Operation
+import Registry.PackageName as PackageName
+import Run (Run)
+import Run.Except as Run.Except
+
+runRouter :: ServerEnv -> Effect Unit
+runRouter env = do
+  void $ HTTPurple.serve
+    { hostname: "0.0.0.0"
+    , port: 8080
+    , onStarted
+    }
+    { route: V1.routes
+    , router: runServer
+    }
+  where
+  onStarted :: Effect Unit
+  onStarted = do
+    Console.log $ String.joinWith "\n"
+      [ " ┌───────────────────────────────────────────┐"
+      , " │ Server now up on port 8080                │"
+      , " │                                           │"
+      , " │ To test, run:                             │"
+      , " │ > curl -v localhost:8080/api/v1/jobs      │"
+      , " └───────────────────────────────────────────┘"
+      ]
+
+  runServer :: Request Route -> Aff Response
+  runServer request = do
+    result <- runEffects env (router request)
+    case result of
+      Left error -> HTTPurple.badRequest (Aff.message error)
+      Right response -> pure response
+
+router :: Request Route -> Run ServerEffects Response
+router { route, method, body } = HTTPurple.usingCont case route, method of
+  Publish, Post -> do
+    publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body
+    lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish
+    forkPackageJob $ Operation.Publish publish
+
+  Unpublish, Post -> do
+    auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body
+    case auth.payload of
+      Operation.Unpublish payload -> do
+        lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload
+        forkPackageJob $ Operation.Authenticated auth
+      _ ->
+        HTTPurple.badRequest "Expected unpublish operation."
+
+  Transfer, Post -> do
+    auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body
+    case auth.payload of
+      Operation.Transfer payload -> do
+        lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload
+        forkPackageJob $ Operation.Authenticated auth
+      _ ->
+        HTTPurple.badRequest "Expected transfer operation."
+
+  Jobs, Get -> do
+    jsonOk (CJ.array V1.jobCodec) []
+
+  Job jobId { level: maybeLogLevel, since }, Get -> do
+    let logLevel = fromMaybe Error maybeLogLevel
+    logs <- lift $ Db.selectLogsByJob jobId logLevel since
+    lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of
+      Left err -> do
+        lift $ Log.error $ "Error while fetching job: " <> err
+        HTTPurple.notFound
+      Right Nothing ->
+        HTTPurple.notFound
+      Right (Just job) ->
+        jsonOk V1.jobCodec
+          { jobId
+          , createdAt: job.createdAt
+          , finishedAt: job.finishedAt
+          , success: job.success
+          , logs
+          }
+
+  Status, Get ->
+    HTTPurple.emptyResponse Status.ok
+
+  Status, Head ->
+    HTTPurple.emptyResponse Status.ok
+
+  _, _ ->
+    HTTPurple.notFound
+  where
+  forkPackageJob :: PackageOperation -> ContT Response (Run _) Response
+  forkPackageJob operation = do
+    lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation)
+    jobId <- newJobId
+    lift $ Db.insertPackageJob { jobId, payload: operation }
+    jsonOk V1.jobCreatedResponseCodec { jobId }
+
+  newJobId :: forall m. MonadEffect m => m JobId
+  newJobId = liftEffect do
+    id <- UUID.make
+    pure $ JobId $ UUID.toString id
diff --git a/app/src/Fetch/Retry.purs b/app/src/Fetch/Retry.purs
index 4260f6e46..cd182385a 100644
--- a/app/src/Fetch/Retry.purs
+++ b/app/src/Fetch/Retry.purs
@@ -43,10 +43,8 @@ withRetryRequest url opts = withRetry retry do
     if response.status >= 400 then Left $ StatusError response
     else Right response
 
-  retry =
-    { timeout: defaultRetry.timeout
-    , retryOnCancel: defaultRetry.retryOnCancel
-    , retryOnFailure: \attempt -> case _ of
+  retry = defaultRetry
+    { retryOnFailure = \attempt -> case _ of
        FetchError _ -> false
        StatusError { status } ->
          -- We retry on 500-level errors in case the server is temporarily
diff --git a/app/test/App/API.purs b/app/test/App/API.purs
index faf4df3a3..7d1861052 100644
--- a/app/test/App/API.purs
+++ b/app/test/App/API.purs
@@ -9,6 +9,7 @@
 import Data.Set as Set
 import Data.String as String
 import Data.String.NonEmpty as NonEmptyString
 import Effect.Aff as Aff
+import Effect.Class.Console as Console
 import Effect.Ref as Ref
 import Node.FS.Aff as FS.Aff
 import Node.Path as Path
@@ -27,8 +28,10 @@ import Registry.Foreign.FastGlob as FastGlob
 import Registry.Foreign.Tmp as Tmp
 import Registry.Internal.Codec as Internal.Codec
 import Registry.Manifest as Manifest
+import Registry.ManifestIndex as ManifestIndex
 import Registry.PackageName as PackageName
 import Registry.Range as Range
+import Registry.Solver as Solver
 import Registry.Test.Assert as Assert
 import Registry.Test.Assert.Run as Assert.Run
 import Registry.Test.Utils as Utils
@@ -57,22 +60,21 @@ spec = do
     removeIgnoredTarballFiles
     copySourceFiles
 
-  Spec.describe "Parses installed paths" do
-    Spec.it "Parses install path /my-package-1.0.0/..."
do - tmp <- Tmp.mkTmpDir - let moduleA = Path.concat [ tmp, "my-package-1.0.0", "src", "ModuleA.purs" ] - case API.parseInstalledModulePath { prefix: tmp, path: moduleA } of - Left err -> Assert.fail $ "Expected to parse " <> moduleA <> " but got error: " <> err - Right { name, version } -> do - Assert.shouldEqual name (Utils.unsafePackageName "my-package") - Assert.shouldEqual version (Utils.unsafeVersion "1.0.0") - FS.Extra.remove tmp - Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do + logs <- liftEffect (Ref.new []) + let + toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry + toLegacyIndex = + Solver.exploreAllTransitiveDependencies + <<< Solver.initializeRegistry + <<< map (map (_.dependencies <<< un Manifest)) + <<< ManifestIndex.toMap + testEnv = { workdir + , logs , index , metadata , pursuitExcludes: Set.singleton (Utils.unsafePackageName "type-equality") @@ -81,7 +83,7 @@ spec = do , github: githubDir } - Assert.Run.runTestEffects testEnv do + result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do -- We'll publish effect@4.0.0 from the fixtures/github-packages -- directory, which has an unnecessary dependency on 'type-equality' -- inserted into it. @@ -90,15 +92,17 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version , resolutions: Nothing } -- First, we publish the package. - API.publish CurrentPackage publishArgs + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -127,9 +131,22 @@ spec = do , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies ] + -- We should verify the resulting metadata file is correct + Metadata effectMetadata <- Registry.readMetadata name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print name <> " to be in metadata." + Just m -> pure m + + case Map.lookup version effectMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." + Just published -> do + let many' = NonEmptyArray.toArray published.compilers + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + -- Finally, we can verify that publishing the package again should fail -- since it already exists. - Except.runExcept (API.publish CurrentPackage publishArgs) >>= case _ of + Except.runExcept (API.publish Nothing publishArgs) >>= case _ of Left _ -> pure unit Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail." @@ -138,14 +155,66 @@ spec = do -- but did not have documentation make it to Pursuit. 
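+          -- ('type-equality' is listed in the pursuitExcludes set configured
+          -- above, so the mock Pursuit reports no published versions for it.)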
let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" + , resolutions: Nothing + } + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + + -- We can also verify that transitive dependencies are added for legacy + -- packages. + let + transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } + transitivePublishArgs = + { compiler: Utils.unsafeVersion "0.15.4" + , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } + , name: transitive.name + , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } - API.publish CurrentPackage pursuitOnlyPublishArgs + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + + -- We should verify the resulting metadata file is correct + Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in metadata." + Just m -> pure m + + case Map.lookup transitive.version transitiveMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." + Just published -> do + let many' = NonEmptyArray.toArray published.compilers + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + + Registry.readManifest transitive.name transitive.version >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in manifest index." + Just (Manifest manifest) -> do + let expectedDeps = Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeRange ">=6.0.0 <7.0.0") + when (manifest.dependencies /= expectedDeps) do + Except.throw $ String.joinWith "\n" + [ "Expected transitive@1.0.0 to have dependencies" + , printJson (Internal.Codec.packageMap Range.codec) expectedDeps + , "\nbut got" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + ] + case result of + Left exn -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Got an Aff exception! 
" <> Aff.message exn + Right (Left err) -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err + Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do @@ -207,7 +276,7 @@ checkBuildPlanToResolutions = do Spec.it "buildPlanToResolutions produces expected resolutions file format" do Assert.shouldEqual generatedResolutions expectedResolutions where - dependenciesDir = "testDir" + installedResolutions = "testDir" resolutions = Map.fromFoldable [ Tuple (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "1.0.0") @@ -218,14 +287,14 @@ checkBuildPlanToResolutions = do generatedResolutions = API.formatPursuitResolutions { resolutions - , dependenciesDir + , installedResolutions } expectedResolutions = Map.fromFoldable do packageName /\ version <- (Map.toUnfoldable resolutions :: Array _) let bowerName = RawPackageName ("purescript-" <> PackageName.print packageName) - path = Path.concat [ dependenciesDir, PackageName.print packageName <> "-" <> Version.print version ] + path = Path.concat [ installedResolutions, PackageName.print packageName <> "-" <> Version.print version ] pure $ Tuple bowerName { path, version } removeIgnoredTarballFiles :: Spec.Spec Unit diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..8276bf708 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 8e8207974..414b09a57 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,6 +2,7 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude +import Data.Array.NonEmpty as NonEmptyArray import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map @@ -96,7 +97,7 @@ convertedPackageSet = Left err -> unsafeCrashWith err Right value -> value where - index = unsafeFromRight $ ManifestIndex.fromSet $ Set.fromFoldable + index = unsafeFromRight $ ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable [ mkManifest assert [ console, effect, prelude ] , mkManifest console [ effect, prelude ] , mkManifest effect [ prelude ] @@ -208,6 +209,7 @@ unsafeMetadataEntry (Tuple name version) = do { 
ref: LenientVersion.raw version , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" , bytes: 0.0 + , compilers: NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2") , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom } diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 2eaca689d..42cc7d6ab 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -11,17 +11,22 @@ module Registry.Test.Assert.Run import Registry.App.Prelude import Data.Array as Array +import Data.Exists as Exists import Data.Foldable (class Foldable) import Data.Foldable as Foldable import Data.FunctorWithIndex (mapWithIndex) import Data.Map as Map import Data.Set as Set import Data.String as String +import Dodo as Dodo import Effect.Aff as Aff import Effect.Now as Now import Effect.Ref as Ref import Node.FS.Aff as FS.Aff import Node.Path as Path +import Registry.API.V1 (LogLevel) +import Registry.App.API (COMPILER_CACHE) +import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache @@ -39,7 +44,7 @@ import Registry.App.Effect.Pursuit (PURSUIT, Pursuit(..)) import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry (REGISTRY, Registry(..)) import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE, Source(..)) +import Registry.App.Effect.Source (FetchError(..), SOURCE, Source(..)) import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE, Storage) import Registry.App.Effect.Storage as Storage @@ -83,6 +88,7 @@ type TEST_EFFECTS = + RESOURCE_ENV + GITHUB_CACHE + LEGACY_CACHE + + COMPILER_CACHE + COMMENT + LOG + EXCEPT String @@ -93,6 +99,7 @@ type TEST_EFFECTS = type TestEnv = { workdir :: FilePath + , logs :: Ref (Array (Tuple LogLevel String)) , metadata :: Ref (Map PackageName Metadata) , index :: Ref ManifestIndex , pursuitExcludes :: Set PackageName @@ -101,8 +108,8 @@ type TestEnv = , username :: String } -runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff a -runTestEffects env operation = do +runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff (Either Aff.Error a) +runTestEffects env operation = Aff.attempt do resourceEnv <- Env.lookupResourceEnv githubCache <- liftEffect Cache.newCacheRef legacyCache <- liftEffect Cache.newCacheRef @@ -118,18 +125,19 @@ runTestEffects env operation = do # Env.runPacchettiBottiEnv { publicKey: "Unimplemented", privateKey: "Unimplemented" } # Env.runResourceEnv resourceEnv -- Caches + # runCompilerCacheMock # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects # Comment.interpret Comment.handleLog - # Log.interpret (\(Log _ _ next) -> pure next) + # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) # Run.runBaseAff' -- | For testing simple Run functions that don't need the whole environment. runBaseEffects :: forall a. 
Run (LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff a -runBaseEffects = +runBaseEffects = do Log.interpret (\(Log _ _ next) -> pure next) -- Base effects >>> Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) @@ -141,6 +149,22 @@ runLegacyCacheMemory = Cache.interpret Legacy.Manifest._legacyCache <<< Cache.ha runGitHubCacheMemory :: forall r a. CacheRef -> Run (GITHUB_CACHE + LOG + EFFECT + r) a -> Run (LOG + EFFECT + r) a runGitHubCacheMemory = Cache.interpret GitHub._githubCache <<< Cache.handleMemory +runCompilerCacheMock :: forall r a. Run (COMPILER_CACHE + LOG + r) a -> Run (LOG + r) a +runCompilerCacheMock = Cache.interpret API._compilerCache case _ of + Cache.Get key -> Exists.runExists getImpl (Cache.encodeFs key) + Cache.Put _ next -> pure next + Cache.Delete key -> Exists.runExists deleteImpl (Cache.encodeFs key) + where + getImpl :: forall x z. Cache.FsEncoding Cache.Reply x z -> Run _ x + getImpl = case _ of + Cache.AsBuffer _ (Cache.Reply reply) -> pure $ reply Nothing + Cache.AsJson _ _ (Cache.Reply reply) -> pure $ reply Nothing + + deleteImpl :: forall x z. Cache.FsEncoding Cache.Ignore x z -> Run _ x + deleteImpl = case _ of + Cache.AsBuffer _ (Cache.Ignore next) -> pure next + Cache.AsJson _ _ (Cache.Ignore next) -> pure next + type PursuitMockEnv = { excludes :: Set PackageName , metadataRef :: Ref (Map PackageName Metadata) @@ -179,7 +203,7 @@ handleRegistryMock env = case _ of WriteManifest manifest reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left err -> pure $ reply $ Left $ "Failed to insert manifest:\n" <> Utils.unsafeStringify manifest <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) @@ -187,7 +211,7 @@ handleRegistryMock env = case _ of DeleteManifest name version reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left err -> pure $ reply $ Left $ "Failed to delete entry for :\n" <> Utils.formatPackageVersion name version <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) @@ -282,11 +306,11 @@ type SourceMockEnv = { github :: FilePath } handleSourceMock :: forall r a. SourceMockEnv -> Source a -> Run (EXCEPT String + AFF + EFFECT + r) a handleSourceMock env = case _ of - Fetch _source destination location ref reply -> do + Fetch destination location ref reply -> do now <- Run.liftEffect Now.nowDateTime case location of - Git _ -> pure $ reply $ Left "Packages cannot be published from Git yet (only GitHub)." - GitHub { subdir } | isJust subdir -> pure $ reply $ Left "Packages cannot use the 'subdir' key yet." + Git _ -> pure $ reply $ Left GitHubOnly + GitHub { subdir } | isJust subdir -> pure $ reply $ Left NoSubdir GitHub { repo } -> do let name = stripPureScriptPrefix repo @@ -295,7 +319,7 @@ handleSourceMock env = case _ of localPath = Path.concat [ env.github, dirname ] destinationPath = Path.concat [ destination, dirname <> "-checkout" ] Run.liftAff (Aff.attempt (FS.Aff.stat localPath)) >>= case _ of - Left _ -> pure $ reply $ Left $ "Cannot copy " <> localPath <> " because it does not exist." + Left _ -> pure $ reply $ Left $ Fatal $ "Cannot copy " <> localPath <> " because it does not exist." 
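+            -- GitHubOnly, NoSubdir, and Fatal are the structured FetchError
+            -- constructors that replace the old string-typed errors in this
+            -- mock handler.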
Right _ -> do Run.liftAff $ FS.Extra.copy { from: localPath, to: destinationPath, preserveTimestamps: true } case pursPublishMethod of diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..2b01deb0b --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,56 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Package-oriented jobs (publish/unpublish/transfer) +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs (one compiler, all packages) +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
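+  -- (Illustrative note: the payload shape itself is defined by the
+  -- application code, e.g. a compiler version plus resolved dependency
+  -- versions, and is stored here only as opaque JSON.)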
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package set jobs +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE package_jobs; +DROP TABLE matrix_jobs; +DROP TABLE package_set_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..2ad866068 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,45 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/flake.lock b/flake.lock index 71e823ad4..ad7901f70 100644 --- a/flake.lock +++ b/flake.lock @@ -1,22 +1,6 @@ { "nodes": { "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1747046372, - "narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-compat_2": { "flake": false, "locked": { "lastModified": 1696426674, @@ -62,11 +46,11 @@ "registry-index": "registry-index" }, "locked": { - "lastModified": 1749349364, - "narHash": "sha256-2/oMkW1ORJdVa3cMeRfoQj/hR4FfZMs79Jn/h5UrQx8=", + "lastModified": 1764469420, + "narHash": "sha256-ASzYEhZ4F8O+hhbYFmgGnAjlIE1xXDvzwv5ifHglU9c=", "owner": "jeslie0", "repo": "mkSpagoDerivation", - "rev": "fc16120512aaccb9950d7a8acc9198c89c9b2315", + "rev": "24f0b27ca00ac02c86e7a9d2d2115edd356006f3", "type": "github" }, "original": { @@ -77,23 +61,23 @@ }, "nixpkgs": { "locked": { - "lastModified": 1749640896, - "narHash": "sha256-oVVwB+4NH7aGysjLtsxsOPspUpDsa5nSH1G8CtD6fK4=", + "lastModified": 1764638368, + "narHash": "sha256-ln1kqV0B2epgFWUeCy+wupfVSFlpMZF8uu9nuXh8C7c=", "owner": "nixos", "repo": "nixpkgs", - "rev": "bf8a7649aaf6567c0c893db016956f7333a50c38", + "rev": "82b75b674b7263dc71ff8c6f5d2ea70686d22b7e", "type": "github" }, "original": { "owner": "nixos", - "ref": "release-25.05", + "ref": "release-25.11", "repo": "nixpkgs", "type": "github" } }, "purescript-overlay": { "inputs": { - "flake-compat": "flake-compat_2", + "flake-compat": "flake-compat", "nixpkgs": [ "nixpkgs" ], @@ -116,11 +100,11 @@ "registry": { "flake": false, "locked": { - "lastModified": 1747985987, - "narHash": "sha256-NHkksW17JaoiMpexAEJ9EQEygFKuv70CooZlsE7/OPs=", + "lastModified": 1763538112, + "narHash": "sha256-/+ug37TQbiEi9w8Lv+f55IE4i0LvBT7yAeff7Fno3mw=", "owner": "purescript", "repo": "registry", - "rev": "33e4d13c6dfbc908c24dffa35f5e28585a383cd7", + "rev": "b9313da46132ce93f2255ba31708633eef19f5ac", "type": "github" }, "original": { @@ -132,11 +116,11 @@ "registry-index": { "flake": false, "locked": { - "lastModified": 1747925902, - "narHash": "sha256-0eIDKoKhx27wDydfYpxp5rD7UjX0YmX/10I0SMi/vnY=", + "lastModified": 1763537903, + "narHash": "sha256-nHQxIUXFcpnb9V6+eLlyoGE1+Lq/mVO8yWW3bdgV+g0=", "owner": "purescript", "repo": "registry-index", - "rev": "8898112c4cc9d275503a416f9a7ff07b9c675339", + "rev": "d59cc60a616067bba7b39e71de054f596e5c28cd", "type": "github" }, "original": { @@ -147,12 +131,10 @@ }, "root": { "inputs": { - "flake-compat": "flake-compat", "flake-utils": "flake-utils", 
"mkSpagoDerivation": "mkSpagoDerivation", "nixpkgs": "nixpkgs", - "purescript-overlay": "purescript-overlay", - "slimlock": "slimlock_2" + "purescript-overlay": "purescript-overlay" } }, "slimlock": { @@ -176,26 +158,6 @@ "type": "github" } }, - "slimlock_2": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1746029857, - "narHash": "sha256-431slzM10HQixP4oQlCwGxUPD8wo4DWVGnIcttqyeEs=", - "owner": "thomashoneyman", - "repo": "slimlock", - "rev": "c49740738a026a00ab6be19300e8cf7b6de03fd7", - "type": "github" - }, - "original": { - "owner": "thomashoneyman", - "repo": "slimlock", - "type": "github" - } - }, "systems": { "locked": { "lastModified": 1681028828, diff --git a/flake.nix b/flake.nix index ea77ce3fd..5f4ffed08 100644 --- a/flake.nix +++ b/flake.nix @@ -2,21 +2,15 @@ description = "The PureScript Registry"; inputs = { - nixpkgs.url = "github:nixos/nixpkgs/release-25.05"; + nixpkgs.url = "github:nixos/nixpkgs/release-25.11"; flake-utils.url = "github:numtide/flake-utils"; - flake-compat.url = "github:edolstra/flake-compat"; - flake-compat.flake = false; - purescript-overlay.url = "github:thomashoneyman/purescript-overlay"; purescript-overlay.inputs.nixpkgs.follows = "nixpkgs"; mkSpagoDerivation.url = "github:jeslie0/mkSpagoDerivation"; mkSpagoDerivation.inputs.nixpkgs.follows = "nixpkgs"; mkSpagoDerivation.inputs.ps-overlay.follows = "purescript-overlay"; - - slimlock.url = "github:thomashoneyman/slimlock"; - slimlock.inputs.nixpkgs.follows = "nixpkgs"; }; outputs = @@ -25,7 +19,6 @@ flake-utils, purescript-overlay, mkSpagoDerivation, - slimlock, ... }: let @@ -41,359 +34,121 @@ pureScriptFileset = fileset.intersection (fileset.gitTracked ./.) ( fileset.unions [ ./app - (fileset.maybeMissing ./check) ./foreign ./lib ./scripts ./test-utils + (fileset.maybeMissing ./check) ./spago.lock ./spago.yaml + ./types ] ); npmFileset = fileset.intersection (fileset.gitTracked ./.) ( - fileset.fileFilter (file: file.name == "package.json" || file.name == "package-lock.json") ./. + fileset.fileFilter (f: f.name == "package.json" || f.name == "package-lock.json") ./. ); - # We can't import from remote urls in dhall when running in CI or other - # network-restricted environments, so we fetch the repository and use the - # local path instead. + DHALL_TYPES = ./types; + GIT_LFS_SKIP_SMUDGE = 1; DHALL_PRELUDE = "${ builtins.fetchGit { url = "https://github.com/dhall-lang/dhall-lang"; - rev = "e35f69d966f205fdc0d6a5e8d0209e7b600d90b3"; + rev = "25cf020ab307cb2d66826b0d1ddac8bc89241e27"; } }/Prelude/package.dhall"; - # The location of the Dhall type specifications, used to type-check manifests. - DHALL_TYPES = ./types; - - # We disable git-lfs files explicitly, as this is intended for large files - # (typically >4GB), and source packgaes really ought not be shipping large - # files — just source code. - GIT_LFS_SKIP_SMUDGE = 1; - registryOverlay = final: prev: rec { - nodejs = prev.nodejs_20; - - # We don't want to force everyone to update their configs if they aren't - # normally on flakes. - nixFlakes = prev.writeShellScriptBin "nixFlakes" '' - exec ${prev.nixVersions.stable}/bin/nix --experimental-features "nix-command flakes" "$@" - ''; - - # Detects arguments to 'git' containing a URL and replaces them with a - # local filepath. This is a drop-in replacement for 'git' that should be - # used in offline / test environments when we only want fixture data. 
- gitMock = - let - nodeScript = - script: - prev.writeScript "node-cmd" '' - ${nodejs}/bin/node -e "${script}" "$@" - ''; - - mock = nodeScript '' - const { URL } = require('url'); - const { spawn } = require('child_process'); - - const repoFixturesDir = process.env.REPO_FIXTURES_DIR; - if (!repoFixturesDir) { - throw new Error('REPO_FIXTURES_DIR is not set, but is required.'); - } - - // Replace any URL arguments with the local fixtures path. - function replaceIfUrl(arg) { - try { - const url = new URL(arg); - const path = url.pathname.replace(/\.git$/, '''); - const file = 'file://' + repoFixturesDir + path; - console.log(file); - return file; - } catch (e) { - // Not a URL, ignore - } - return arg; - } - - const args = process.argv.slice(1); - const modified = []; - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - modified.push(replaceIfUrl(arg)); - } - - const git = spawn('${prev.git}/bin/git', modified); - - git.stdout.on('data', (data) => { - console.log(data.toString('utf8')); - }); - - git.stderr.on('data', (data) => { - console.error(data.toString('utf8')); - }); - - git.on('close', (code) => { - if (code !== 0) { - throw new Error('git exited with code ' + code); - } - }); - ''; - in - prev.writeShellScriptBin "git" '' - exec ${mock} "$@" - ''; - - # Packages associated with the registry, ie. in this repository. - registry = - let - spago-lock = prev.mkSpagoDerivation { - name = "registry"; - src = ./.; - nativeBuildInputs = [ - prev.pkgs.spago-bin.spago-0_93_44 - prev.pkgs.purescript - ]; - buildPhase = "spago build"; - installPhase = "mkdir $out; cp -r * $out"; - }; - - package-lock = - (prev.slimlock.buildPackageLock { - src = fileset.toSource { - root = ./.; - fileset = npmFileset; - }; - omit = [ - "dev" - "peer" - ]; - }) - # better-sqlite3 relies on node-gyp and python3 in the build environment, so - # we add those to the native build inputs. - .overrideAttrs - ( - finalAttrs: prevAttrs: { - nativeBuildInputs = - ( - prevAttrs.nativeBuildInputs or [ ] - ++ [ - prev.python3 - prev.nodePackages.node-gyp - ] - ) - ++ (if prev.stdenv.isDarwin then [ prev.darwin.cctools ] else [ ]); - } - ); + registryLib = import ./nix/lib { lib = nixpkgs.lib; }; - # Produces a list of all PureScript binaries supported by purescript-overlay, - # ie. those from 0.13 onwards, callable using the naming convention - # `purs-MAJOR_MINOR_PATCH`. - # $ purs-0_14_0 --version - # 0.14.0 - # - # To add a new compiler to the list, just update the flake: - # $ nix flake update - supportedCompilers = prev.lib.filterAttrs ( - name: _: (builtins.match "^purs-[0-9]+_[0-9]+_[0-9]+$" name != null) - ) prev.purs-bin; + # Build sources with filesets + spagoSrc = fileset.toSource { + root = ./.; + fileset = pureScriptFileset; + }; - # An attrset containing all the PureScript binaries we want to make - # available. - compilers = prev.symlinkJoin { - name = "purs-compilers"; - paths = prev.lib.mapAttrsToList ( - name: drv: - prev.writeShellScriptBin name '' - exec ${drv}/bin/purs "$@" - '' - ) supportedCompilers; - }; + npmSrc = fileset.toSource { + root = ./.; + fileset = npmFileset; + }; - purs-versions = prev.writeShellScriptBin "purs-versions" '' - echo ${ - prev.lib.concatMapStringsSep " " ( - x: prev.lib.removePrefix "purs-" (builtins.replaceStrings [ "_" ] [ "." 
] x) - ) (prev.lib.attrNames supportedCompilers) - } - ''; - in - { - apps = prev.callPackages ./app { - inherit - compilers - purs-versions - package-lock - spago-lock - ; - }; - scripts = prev.callPackages ./scripts { - inherit - compilers - purs-versions - package-lock - spago-lock - ; - }; - inherit - purs-versions - compilers - package-lock - spago-lock - ; - }; + # Overlays + overlays = import ./nix/overlays { + inherit + purescript-overlay + mkSpagoDerivation + registryLib + spagoSrc + npmSrc + ; }; in flake-utils.lib.eachSystem supportedSystems ( system: let pkgs = import nixpkgs { - inherit system; - overlays = [ - purescript-overlay.overlays.default - mkSpagoDerivation.overlays.default - slimlock.overlays.default - registryOverlay - ]; + inherit system overlays; }; - inherit (pkgs) lib; - - # We can't run 'spago test' in our flake checks because it tries to - # write to a cache and I can't figure out how to disable it. Instead - # we supply it as a shell script. - # - # Once we can run 'spago test --offline' or something similar, then this - # should just be a normal derivation that links the node_modules, copies - # the output dir locally, and runs 'spago test'. - # - # $ nix develop --command run-tests-script - run-tests-script = pkgs.writeShellScriptBin "run-tests-script" '' - set -euo pipefail - WORKDIR=$(mktemp -d) - cp spago.yaml spago.lock $WORKDIR - cp -a app foreign lib scripts test-utils types $WORKDIR - ln -s ${pkgs.registry.package-lock}/js/node_modules $WORKDIR/node_modules - - pushd $WORKDIR - export HEALTHCHECKS_URL=${defaultEnv.HEALTHCHECKS_URL} - ${pkgs.spago-bin.spago-0_93_44}/bin/spago test - - popd - ''; - - mkAppOutput = drv: { - type = "app"; - program = "${drv}/bin/${drv.name}"; - meta.description = drv.meta.description or "PureScript Registry ${drv.name}"; - }; - - # A full set of environment variables, each set to their default values - # according to the env.example file, or to the values explicitly set below - # (e.g. DHALL_PRELUDE and DHALL_TYPES). - defaultEnv = parseEnv ./.env.example // { + defaultEnv = registryLib.parseEnv ./.env.example // { inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; }; + in + { + packages = pkgs.registry.apps // pkgs.registry.scripts; - # Parse a .env file, skipping empty lines and comments, into Nix attrset - parseEnv = - path: - let - # Filter out lines only containing whitespace or comments - lines = pkgs.lib.splitString "\n" (builtins.readFile path); - noEmpties = builtins.filter (line: builtins.match "^[[:space:]]*$" line == null) lines; - noComments = builtins.filter (line: builtins.match "^#.*$" line == null) noEmpties; - toKeyPair = - line: - let - parts = pkgs.lib.splitString "=" line; - in - { - name = builtins.head parts; - value = pkgs.lib.concatStrings (builtins.tail parts); - }; - in - builtins.listToAttrs (builtins.map toKeyPair noComments); - - # Allows you to run a local VM with the registry server, mimicking the - # actual deployment. - run-vm = + apps = let - vm-machine = nixpkgs.lib.nixosSystem { - system = builtins.replaceStrings [ "darwin" ] [ "linux" ] system; - modules = [ - { - nixpkgs.overlays = [ - purescript-overlay.overlays.default - mkSpagoDerivation.overlays.default - slimlock.overlays.default - registryOverlay - ]; - } - ./nix/test-vm.nix - { - services.registry-server = { - enable = true; - host = "localhost"; - port = 8080; - enableCerts = false; - # Note: the default credentials are not valid, so you cannot - # actually publish packages, etc. 
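+              # DHALL_PRELUDE is now passed as a plain derivation attribute
+              # (see the 'inherit DHALL_PRELUDE;' line below) rather than
+              # nested under 'env'.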
without overriding the relevant - # env vars below. - envVars = defaultEnv; - }; - } - ]; + mkApp = name: drv: { + type = "app"; + program = "${drv}/bin/${drv.name}"; + meta = drv.meta or { }; }; in - pkgs.writeShellScript "run-vm.sh" '' - export NIX_DISK_IMAGE=$(mktemp -u -t nixos.qcow2.XXXXXXX) - trap "rm -f $NIX_DISK_IMAGE" EXIT - ${vm-machine.config.system.build.vm}/bin/run-registry-vm - ''; - in - rec { - packages = pkgs.registry.apps // pkgs.registry.scripts; - - apps = pkgs.lib.mapAttrs (_: drv: mkAppOutput drv) packages // { - default = { - type = "app"; - program = "${run-vm}"; - meta.description = "Run the registry server in a NixOS VM"; - }; - }; + pkgs.lib.mapAttrs mkApp (pkgs.registry.apps // pkgs.registry.scripts); checks = { + spago-test = + pkgs.runCommand "spago-test" + { + nativeBuildInputs = + with pkgs; + [ + nodejs + purs + ] + ++ registry-runtime-deps; + HEALTHCHECKS_URL = defaultEnv.HEALTHCHECKS_URL or ""; + } + '' + cp -r ${pkgs.registry-spago-lock} src && chmod -R +w src && cd src + ln -s ${pkgs.registry-package-lock}/node_modules . + node -e "import('./output/Test.Registry.Main/index.js').then(m => m.main())" + echo "Tests passed!" > $out + ''; + nix-format = pkgs.runCommand "nix-format" { src = fileset.toSource { root = ./.; - fileset = fileset.fileFilter (file: file.hasExt "nix") ./.; + fileset = fileset.fileFilter (f: f.hasExt "nix") ./.; }; - buildInputs = with pkgs; [ nixfmt-rfc-style ]; + nativeBuildInputs = [ pkgs.nixfmt-rfc-style ]; } '' - set -euo pipefail - nixfmt --check $(find $src -type f) | tee $out + nixfmt --check $(find $src -type f) && touch $out ''; purescript-format = - pkgs.runCommand "purescript-format-check" + pkgs.runCommand "purescript-format" { - src = fileset.toSource { - root = ./.; - fileset = pureScriptFileset; - }; - buildInputs = with pkgs; [ purs-tidy ]; + src = spagoSrc; + nativeBuildInputs = [ pkgs.purs-tidy ]; } '' - set -euo pipefail - purs-tidy check $src | tee $out + purs-tidy check $src && touch $out ''; - # This script verifies that - # - all the dhall we have in the repo actually compiles - # - all the example manifests actually typecheck as Manifests verify-dhall = pkgs.runCommand "verify-dhall" { @@ -404,490 +159,58 @@ ./lib/fixtures/manifests ]; }; - env = { - inherit DHALL_PRELUDE; - }; - buildInputs = with pkgs; [ + nativeBuildInputs = with pkgs; [ dhall dhall-json parallel ]; + inherit DHALL_PRELUDE; } '' - set -euo pipefail - - mkdir -p cache/dhall - export XDG_CACHE_HOME="$PWD/cache" - - find $src/types/v1 -iname "*.dhall" \ - | parallel ${ - lib.strings.escapeShellArgs [ - "--will-cite" - '' - echo "Typechecking {}" - dhall <<< {} | tee $out - '' - ] - } - - find $src/lib/fixtures/manifests -iname "*.json" \ - | parallel ${ - lib.strings.escapeShellArgs [ - "--will-cite" - '' - echo "Conforming {} to the Manifest type" - json-to-dhall --plain --records-loose --unions-strict --file {} $src/types/v1/Manifest.dhall | tee --append $out - '' - ] - } + mkdir -p cache/dhall && export XDG_CACHE_HOME="$PWD/cache" + find $src/types/v1 -name "*.dhall" | parallel --will-cite 'dhall <<< {}' + find $src/lib/fixtures/manifests -name "*.json" | parallel --will-cite \ + 'json-to-dhall --plain --records-loose --unions-strict --file {} $src/types/v1/Manifest.dhall' + touch $out ''; - # This is an integration test that will run the server and allow us to - # test it by sending API requests. 
You can run only this check with: - # nix build .#checks.${your-system}.integration - integration = - if pkgs.stdenv.isDarwin then - pkgs.runCommand "integration-disabled" { } '' - mkdir $out - echo "Integration tests are not supported on macOS systems, skipping..." - exit 0 - '' - else - let - serverPort = 8080; - githubPort = 9001; - bucketPort = 9002; - s3Port = 9003; - pursuitPort = 9004; - stateDir = "/var/lib/registry-server"; - envVars = defaultEnv // { - # We override all remote APIs with their local wiremock ports - GITHUB_API_URL = "http://localhost:${toString githubPort}"; - S3_API_URL = "http://localhost:${toString s3Port}"; - S3_BUCKET_URL = "http://localhost:${toString bucketPort}"; - PURSUIT_API_URL = "http://localhost:${toString pursuitPort}"; - - # We add an extra env var for the mock git applicaiton to know - # where the fixtures are. - REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; - }; - in - pkgs.nixosTest { - name = "server integration test"; - nodes = { - registry = { - imports = [ - (import ./nix/wiremock.nix { service = "github-api"; }) - (import ./nix/wiremock.nix { service = "s3-api"; }) - (import ./nix/wiremock.nix { service = "bucket-api"; }) - (import ./nix/wiremock.nix { service = "pursuit-api"; }) - ./nix/module.nix - ]; - config = { - nixpkgs.overlays = [ - # We need to ensure that the server is using the mock git - # binary instead of the real one. We do not, however, want - # to override 'git' in nixpkgs because that would make us - # rebuild everything that depends on git. - (_: prev: { registry.apps.server = prev.registry.apps.server.override { git = prev.gitMock; }; }) - ]; - - virtualisation.graphics = false; - - services.registry-server = { - enable = true; - host = "localhost"; - port = serverPort; - enableCerts = false; - stateDir = stateDir; - envVars = envVars; - }; - - services.wiremock-github-api = { - enable = true; - port = githubPort; - mappings = [ - { - request = { - method = "GET"; - url = "/repos/purescript/purescript-effect/contents/bower.json?ref=v4.0.0"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - type = "file"; - encoding = "base64"; - content = '' - ewogICJuYW1lIjogInB1cmVzY3JpcHQtZWZmZWN0IiwKICAiaG9tZXBhZ2Ui - OiAiaHR0cHM6Ly9naXRodWIuY29tL3B1cmVzY3JpcHQvcHVyZXNjcmlwdC1l - ZmZlY3QiLAogICJsaWNlbnNlIjogIkJTRC0zLUNsYXVzZSIsCiAgInJlcG9z - aXRvcnkiOiB7CiAgICAidHlwZSI6ICJnaXQiLAogICAgInVybCI6ICJodHRw - czovL2dpdGh1Yi5jb20vcHVyZXNjcmlwdC9wdXJlc2NyaXB0LWVmZmVjdC5n - aXQiCiAgfSwKICAiaWdub3JlIjogWwogICAgIioqLy4qIiwKICAgICJib3dl - cl9jb21wb25lbnRzIiwKICAgICJub2RlX21vZHVsZXMiLAogICAgIm91dHB1 - dCIsCiAgICAidGVzdCIsCiAgICAiYm93ZXIuanNvbiIsCiAgICAicGFja2Fn - ZS5qc29uIgogIF0sCiAgImRlcGVuZGVuY2llcyI6IHsKICAgICJwdXJlc2Ny - aXB0LXByZWx1ZGUiOiAiXjYuMC4wIgogIH0KfQo= - ''; - }; - }; - } - { - request = { - method = "GET"; - url = "/repos/purescript/purescript-effect/contents/LICENSE?ref=v4.0.0"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - type = "file"; - encoding = "base64"; - content = '' - Q29weXJpZ2h0IDIwMTggUHVyZVNjcmlwdAoKUmVkaXN0cmlidXRpb24gYW5k - IHVzZSBpbiBzb3VyY2UgYW5kIGJpbmFyeSBmb3Jtcywgd2l0aCBvciB3aXRo - b3V0IG1vZGlmaWNhdGlvbiwKYXJlIHBlcm1pdHRlZCBwcm92aWRlZCB0aGF0 - IHRoZSBmb2xsb3dpbmcgY29uZGl0aW9ucyBhcmUgbWV0OgoKMS4gUmVkaXN0 - cmlidXRpb25zIG9mIHNvdXJjZSBjb2RlIG11c3QgcmV0YWluIHRoZSBhYm92 - ZSBjb3B5cmlnaHQgbm90aWNlLCB0aGlzCmxpc3Qgb2YgY29uZGl0aW9ucyBh - bmQgdGhlIGZvbGxvd2luZyBkaXNjbGFpbWVyLgoKMi4gUmVkaXN0cmlidXRp - 
b25zIGluIGJpbmFyeSBmb3JtIG11c3QgcmVwcm9kdWNlIHRoZSBhYm92ZSBj - b3B5cmlnaHQgbm90aWNlLAp0aGlzIGxpc3Qgb2YgY29uZGl0aW9ucyBhbmQg - dGhlIGZvbGxvd2luZyBkaXNjbGFpbWVyIGluIHRoZSBkb2N1bWVudGF0aW9u - IGFuZC9vcgpvdGhlciBtYXRlcmlhbHMgcHJvdmlkZWQgd2l0aCB0aGUgZGlz - dHJpYnV0aW9uLgoKMy4gTmVpdGhlciB0aGUgbmFtZSBvZiB0aGUgY29weXJp - Z2h0IGhvbGRlciBub3IgdGhlIG5hbWVzIG9mIGl0cyBjb250cmlidXRvcnMK - bWF5IGJlIHVzZWQgdG8gZW5kb3JzZSBvciBwcm9tb3RlIHByb2R1Y3RzIGRl - cml2ZWQgZnJvbSB0aGlzIHNvZnR3YXJlIHdpdGhvdXQKc3BlY2lmaWMgcHJp - b3Igd3JpdHRlbiBwZXJtaXNzaW9uLgoKVEhJUyBTT0ZUV0FSRSBJUyBQUk9W - SURFRCBCWSBUSEUgQ09QWVJJR0hUIEhPTERFUlMgQU5EIENPTlRSSUJVVE9S - UyAiQVMgSVMiIEFORApBTlkgRVhQUkVTUyBPUiBJTVBMSUVEIFdBUlJBTlRJ - RVMsIElOQ0xVRElORywgQlVUIE5PVCBMSU1JVEVEIFRPLCBUSEUgSU1QTElF - RApXQVJSQU5USUVTIE9GIE1FUkNIQU5UQUJJTElUWSBBTkQgRklUTkVTUyBG - T1IgQSBQQVJUSUNVTEFSIFBVUlBPU0UgQVJFCkRJU0NMQUlNRUQuIElOIE5P - IEVWRU5UIFNIQUxMIFRIRSBDT1BZUklHSFQgSE9MREVSIE9SIENPTlRSSUJV - VE9SUyBCRSBMSUFCTEUgRk9SCkFOWSBESVJFQ1QsIElORElSRUNULCBJTkNJ - REVOVEFMLCBTUEVDSUFMLCBFWEVNUExBUlksIE9SIENPTlNFUVVFTlRJQUwg - REFNQUdFUwooSU5DTFVESU5HLCBCVVQgTk9UIExJTUlURUQgVE8sIFBST0NV - UkVNRU5UIE9GIFNVQlNUSVRVVEUgR09PRFMgT1IgU0VSVklDRVM7CkxPU1Mg - T0YgVVNFLCBEQVRBLCBPUiBQUk9GSVRTOyBPUiBCVVNJTkVTUyBJTlRFUlJV - UFRJT04pIEhPV0VWRVIgQ0FVU0VEIEFORCBPTgpBTlkgVEhFT1JZIE9GIExJ - QUJJTElUWSwgV0hFVEhFUiBJTiBDT05UUkFDVCwgU1RSSUNUIExJQUJJTElU - WSwgT1IgVE9SVAooSU5DTFVESU5HIE5FR0xJR0VOQ0UgT1IgT1RIRVJXSVNF - KSBBUklTSU5HIElOIEFOWSBXQVkgT1VUIE9GIFRIRSBVU0UgT0YgVEhJUwpT - T0ZUV0FSRSwgRVZFTiBJRiBBRFZJU0VEIE9GIFRIRSBQT1NTSUJJTElUWSBP - RiBTVUNIIERBTUFHRS4K - ''; - }; - }; - } - { - request = { - method = "GET"; - url = "/repos/purescript/package-sets/tags"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - name = "psc-0.15.10-20230105"; - commit = { - sha = "090897c992b2b310b1456506308db789672adac1"; - url = "https://api.github.com/repos/purescript/package-sets/commits/090897c992b2b310b1456506308db789672adac1"; - }; - }; - }; - } - ]; - }; - - services.wiremock-s3-api = { - enable = true; - port = s3Port; - files = [ - { - name = "prelude-6.0.1.tar.gz"; - path = ./app/fixtures/registry-storage/prelude-6.0.1.tar.gz; - } - ]; - mappings = [ - { - request = { - method = "GET"; - url = "/prelude/6.0.1.tar.gz"; - }; - response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "prelude-6.0.1.tar.gz"; - }; - } - ]; - }; - - services.wiremock-bucket-api = { - enable = true; - port = bucketPort; - mappings = [ - { - request = { - method = "GET"; - }; - response = { - status = 200; - body = ''prelude/6.0.1.tar.gz16298"abc123"''; - }; - } - # We don't expect that effect-4.0.0 has been uploaded. - { - request = { - method = "PUT"; - url = "/effect/4.0.0.tar.gz?x-id=PutObject"; - }; - response = { - status = 200; - body = ''"abc123"''; - }; - } - # But we do expect that prelude has been uploaded and - # can't be uploaded again. - { - request = { - method = "PUT"; - url = "/prelude/6.0.1.tar.gz?x-id=PutObject"; - }; - response = { - status = 500; - }; - } - ]; - }; - - services.wiremock-pursuit-api = { - enable = true; - port = pursuitPort; - mappings = [ - # Already-published packages, ie. the registry-storage - # tarballs. 
- { - request = { - method = "GET"; - url = "/packages/purescript-prelude/available-versions"; - }; - response = { - status = 200; - body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; - }; - } - # The result of publishing a package, which we hardcode - # to 201 (success) for now. - { - request = { - method = "POST"; - url = "/packages"; - }; - response = { - status = 201; - }; - } - ]; - }; - }; - }; - client = { - config = { - virtualisation.graphics = false; - }; - }; - }; - - # Test scripts are written in Python: - # https://nixos.org/manual/nixos/stable/index.html#sec-nixos-tests - # - # Note that the python file will be linted, and the test will fail if - # the script fails the lint — if you see an unexpected failure, check - # the nix log for errors. - testScript = - let - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - - mkdir -p ${envVars.REPO_FIXTURES_DIR}/purescript - - git config --global user.email "pacchettibotti@purescript.org" - git config --global user.name "pacchettibotti" - git config --global init.defaultBranch "master" - - # First the registry-index repo - cp -r ${./app/fixtures/registry-index} ${envVars.REPO_FIXTURES_DIR}/purescript/registry-index - - # Then the registry repo - cp -r ${./app/fixtures/registry} ${envVars.REPO_FIXTURES_DIR}/purescript/registry - - # Finally, the legacy package-sets repo - cp -r ${./app/fixtures/package-sets} ${envVars.REPO_FIXTURES_DIR}/purescript/package-sets - - # Next, we set up arbitrary Git repos that should be available - cp -r ${./app/fixtures/github-packages/effect-4.0.0} ${envVars.REPO_FIXTURES_DIR}/purescript/purescript-effect - - # Then we initialize the repos - for REPO in ${envVars.REPO_FIXTURES_DIR}/purescript/*/ - do - pushd $REPO - echo "Initializing $REPO" - git init - git add . - git commit -m "Fixture commit" - # Necessary so you can push to the upstream on the same branch - # as you are currently on. Wrecks the tree for the upstream, - # but this is acceptable for testing. - git config receive.denyCurrentBranch ignore - popd - done - - # Then we fixup the repos that need tags - pushd ${envVars.REPO_FIXTURES_DIR}/purescript/package-sets - git tag -m "psc-0.15.4-20230105" psc-0.15.4-20230105 - popd - - pushd ${envVars.REPO_FIXTURES_DIR}/purescript/purescript-effect - git tag -m "v4.0.0" v4.0.0 - popd - ''; - - publish_effect = pkgs.writeText "publish-effect-4.0.0.json" '' - { - "name": "effect", - "ref": "v4.0.0", - "compiler": "0.15.4", - "location": { - "githubOwner": "purescript", - "githubRepo": "purescript-effect" - } - } - ''; - in - '' - import json - import time - - ########## - # - # SETUP - # - ########## - - # We set up the git fixtures - registry.start() - print(registry.succeed("${setupGitFixtures}/bin/setup-git-fixtures")) - - # We wait for the server to start up and for the client to be able to reach it. - registry.wait_for_unit("wiremock-github-api.service") - registry.wait_for_unit("wiremock-s3-api.service") - registry.wait_for_unit("wiremock-bucket-api.service") - registry.wait_for_unit("wiremock-pursuit-api.service") - registry.wait_for_unit("server.service") - - # Give time for all the various services to come up... 
- client.start() - client.wait_until_succeeds("${pkgs.curl}/bin/curl --fail-with-body http://registry/api/v1/jobs", timeout=20) - - ########## - # - # TESTS - # - ########## - - # First we initiate the call to publish - print("POST /publish") - publish_result = json.loads(client.succeed("${pkgs.curl}/bin/curl -L -X POST -d '@${publish_effect}' http://registry/api/v1/publish --header 'Content-Type:application/json'")) - print(publish_result) - job_id = publish_result['jobId'] - assert len(job_id) == 36, f"POST /publish should return a 36-char job id, but returned {publish_result}" - - # Then we poll for job results, expecting an eventual 'success'. - try_count = 0 - delay_seconds = 3 - prev_timestamp = "2023-07-29T00:00:00.000Z" - log_level = "DEBUG" - while True: - print(f"Requesting job information for job {job_id}") - poll_result = json.loads(client.succeed(f"${pkgs.curl}/bin/curl -L http://registry/api/v1/jobs/{job_id}?since={prev_timestamp}&level={log_level}")) - print(poll_result) - if "finishedAt" in poll_result: - print("Job has completed!") - success = poll_result['success'] - assert success, f"GET /jobs/{job_id} should return success, but it returned {poll_result}" - break - elif (try_count * delay_seconds) > 60: - raise ValueError(f"Cancelling publish request after {try_count * delay_seconds} seconds, this is too long...") - else: - print(f"Job is still ongoing, retrying in {delay_seconds} seconds...") - time.sleep(delay_seconds) - try_count = try_count + 1 - ''; - }; + integration = pkgs.callPackage ./nix/test/integration.nix { + inherit overlays; + rootPath = ./.; + }; }; - devShells = { - default = pkgs.mkShell { - inherit GIT_LFS_SKIP_SMUDGE; - - name = "registry-dev"; - packages = with pkgs; [ - # All stable PureScript compilers - registry.compilers - registry.purs-versions - - # TODO: Hacky, remove when I can run spago test in a pure env - run-tests-script - - # Deployment + devShells.default = pkgs.mkShell { + name = "registry-dev"; + inherit GIT_LFS_SKIP_SMUDGE; + packages = + with pkgs; + registry-runtime-deps + ++ [ + # Development-specific tools colmena - - # Project tooling - nixFlakes nixfmt-rfc-style - git - git-lfs bash nodejs jq - licensee - coreutils - gzip - gnutar - dhall - dhall-json dbmate - - # Development tooling purs - spago-bin.spago-0_93_44 + spago purs-tidy-unstable purs-backend-es-unstable ]; - }; }; } ) - # Separated because this is not supported for all systems. // { - # Deployment specification for the registry server colmena = { meta = { nixpkgs = import nixpkgs { system = "x86_64-linux"; - overlays = [ - purescript-overlay.overlays.default - mkSpagoDerivation.overlays.default - slimlock.overlays.default - registryOverlay - ]; + inherit overlays; }; }; - # The registry server + registry = { lib, modulesPath, ... }: let @@ -897,27 +220,21 @@ deployment.targetHost = host; deployment.buildOnTarget = true; - # We import the server module and also the digital ocean configuration - # necessary to run in a DO droplet. imports = lib.optional (builtins.pathExists ./do-userdata.nix) ./do-userdata.nix ++ [ (modulesPath + "/virtualisation/digital-ocean-config.nix") - ./nix/module.nix - # Extra config for the deployed server only. + ./nix/registry-server.nix { - # Enable Digital Ocean monitoring services.do-agent.enable = true; - - # Enable the registry server - services.registry-server.enable = true; - services.registry-server.host = host; - services.registry-server.envVars = { - # These env vars are known to Nix so we set them in advance. 
- # Others, like credentials, must be set in a .env file in - # the state directory, unless there are viable defaults. - inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + services.registry-server = { + enable = true; + host = host; + envVars = { + # These env vars are known to Nix so we set them in advance. + # Others, like credentials, must be set in a .env file in + # the state directory, unless there are viable defaults. + inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + }; }; - - # Don't change this. system.stateVersion = "24.05"; } ]; diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index c0258b096..d7787466b 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -207,12 +207,17 @@ getCommitDateRequest { address, commitSha } = , headers: Object.empty , args: noArgs , paginate: false - , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "Commit" $ CJ.Record.object - { committer: CJ.Record.object { date: Internal.Codec.iso8601DateTime } } + , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "CommitData" $ CJ.Record.object + { data: CJ.named "Commit" $ CJ.Record.object + { committer: CJ.named "Commit.committer" $ CJ.Record.object + { date: Internal.Codec.iso8601DateTime + } + } + } } where - toJsonRep date = { committer: { date } } - fromJsonRep = _.committer.date + toJsonRep date = { data: { committer: { date } } } + fromJsonRep = _.data.committer.date -- | Create a comment on an issue. Requires authentication. -- | https://github.com/octokit/plugin-rest-endpoint-methods.js/blob/v5.16.0/docs/issues/createComment.md diff --git a/foreign/src/Foreign/Tmp.js b/foreign/src/Foreign/Tmp.js index b11d10232..8995afdfc 100644 --- a/foreign/src/Foreign/Tmp.js +++ b/foreign/src/Foreign/Tmp.js @@ -3,6 +3,6 @@ import { setGracefulCleanup, dirSync } from "tmp"; setGracefulCleanup(); export const mkTmpDirImpl = () => { - const tmpobj = dirSync(); + const tmpobj = dirSync({ template: 'XXXXXX' }); return tmpobj.name; }; diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..4bae692f5 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,8 +15,6 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.PackageName (PackageName) -import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG @@ -66,9 +64,6 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , ref :: String , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -78,9 +73,6 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec - , packageName: PackageName.codec - , ref: CJ.string , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean @@ -94,26 +86,6 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob - -derive instance Eq JobType - -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right 
TransferJob - j -> Left $ "Invalid job type " <> show j - -printJobType :: JobType -> String -printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType) - type LogLine = { level :: LogLevel , message :: String diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs new file mode 100644 index 000000000..dbc4eaf01 --- /dev/null +++ b/lib/src/JobType.purs @@ -0,0 +1,27 @@ +module Registry.JobType where + +import Prelude + +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Sum as CJ.Sum +import Data.Either (Either(..), hush) + +data JobType = PublishJob | UnpublishJob | TransferJob + +derive instance Eq JobType + +parse :: String -> Either String JobType +parse = case _ of + "publish" -> Right PublishJob + "unpublish" -> Right UnpublishJob + "transfer" -> Right TransferJob + j -> Left $ "Invalid job type " <> show j + +print :: JobType -> String +print = case _ of + PublishJob -> "publish" + UnpublishJob -> "unpublish" + TransferJob -> "transfer" + +codec :: CJ.Codec JobType +codec = CJ.Sum.enumSum print (hush <<< parse) diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index c867b5d9b..4837b49ed 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -103,25 +103,18 @@ lookup name version (ManifestIndex index) = -- | Insert a new manifest into the manifest index, failing if the manifest -- | indicates dependencies that cannot be satisfied. Dependencies are not -- | satisfied if the package is not in the index. -insert :: Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex -insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do +insert :: IncludeRanges -> Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex +insert consider manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do let unsatisfied :: Map PackageName Range unsatisfied = Map.fromFoldable do Tuple dependency range <- Map.toUnfoldable dependencies case Map.lookup dependency index of - Just _versions -> - -- Ideally we would enforce that inserting a manifest requires that - -- at least one version exists in the index in the given range already - -- Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> - -- - -- However, to be somewhat lenient on what packages can be admitted to - -- the official index, we just look to see the package name exists. - -- - -- Note that if we _do_ add this check later on, we will need to - -- produce an alternate version that does not check version bounds for - -- use in validatiing package sets, ie. 'maximalIndexIgnoringBounds' - [] + Just versions -> case consider of + IgnoreRanges -> [] + ConsiderRanges + | Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> [] + | otherwise -> [ Tuple dependency range ] _ -> [ Tuple dependency range ] @@ -137,12 +130,12 @@ insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) -- | package names (and not package versions), it is always acceptable to delete -- | a package version so long as it has at least 2 versions. However, removing -- | a package altogether incurs a full validation check. 
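+-- | The new IncludeRanges argument controls how validation treats version
+-- | bounds: ConsiderRanges requires at least one indexed version to satisfy
+-- | each dependency range, while IgnoreRanges only checks that the package
+-- | name exists in the index.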
-delete :: PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -delete name version (ManifestIndex index) = do +delete :: IncludeRanges -> PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +delete consider name version (ManifestIndex index) = do case Map.lookup name index of Nothing -> pure (ManifestIndex index) Just versionsMap | Map.size versionsMap == 1 -> - fromSet $ Set.fromFoldable do + fromSet consider $ Set.fromFoldable do Tuple _ versions <- Map.toUnfoldableUnordered (Map.delete name index) Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] @@ -151,21 +144,21 @@ delete name version (ManifestIndex index) = do -- | Convert a set of manifests into a `ManifestIndex`. Reports all failures -- | encountered rather than short-circuiting. -fromSet :: Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -fromSet manifests = do - let Tuple failed index = maximalIndex manifests +fromSet :: IncludeRanges -> Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +fromSet consider manifests = do + let Tuple failed index = maximalIndex consider manifests if Map.isEmpty failed then Right index else Left failed -- | Produce the maximal `ManifestIndex` possible for the given set of -- | `Manifest`s, collecting failures along the way. -maximalIndex :: Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -maximalIndex manifests = do +maximalIndex :: IncludeRanges -> Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +maximalIndex consider manifests = do let - insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert manifest index of + insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert consider manifest index of Left errors -> Tuple (Map.insertWith Map.union name (Map.singleton version errors) failed) index Right newIndex -> Tuple failed newIndex - Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort IgnoreRanges manifests) + Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort consider manifests) data IncludeRanges = ConsiderRanges diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index 62fe3c5e8..16392b017 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,15 +20,20 @@ module Registry.Metadata import Prelude +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) +import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location @@ -37,6 +42,8 @@ import Registry.Owner as Owner import Registry.Sha256 (Sha256) import Registry.Sha256 as Sha256 import Registry.Version (Version) +import Registry.Version as Version +import Type.Proxy (Proxy(..)) -- | A record of all published and unpublished versions of a package, along with -- | the last-used location and any owners (public keys) authorized to take @@ -67,14 +74,18 @@ 
codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object -- | not rely on its presence! type PublishedMetadata = { bytes :: Number + , compilers :: NonEmptyArray Version , hash :: Sha256 , publishedTime :: DateTime + + -- UNSPECIFIED: Will be removed in the future. , ref :: String } publishedMetadataCodec :: CJ.Codec PublishedMetadata publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object { bytes: CJ.number + , compilers: CJ.Common.nonEmptyArray Version.codec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime , ref: CJ.string diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..262ceb3db 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,8 +14,8 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) , PackageSetUpdateData @@ -23,6 +23,9 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageName + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec @@ -58,6 +61,25 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -74,6 +96,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -84,6 +107,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } @@ -178,6 +202,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. 
-- | -- | For full details, see the registry spec: diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs index 0dc31e283..c842145d9 100644 --- a/lib/src/Operation/Validation.purs +++ b/lib/src/Operation/Validation.purs @@ -5,10 +5,10 @@ import Prelude import Data.Array as Array import Data.Array.NonEmpty (NonEmptyArray) import Data.Array.NonEmpty as NEA +import Data.Bifunctor as Bifunctor import Data.DateTime (DateTime) import Data.DateTime as DateTime import Data.Either (Either(..)) -import Data.List.NonEmpty (NonEmptyList) import Data.Map (Map) import Data.Map as Map import Data.Maybe (Maybe(..), maybe) @@ -20,7 +20,7 @@ import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.Time.Duration (Hours(..)) import Data.Traversable (traverse) -import Data.Tuple (Tuple(..), uncurry) +import Data.Tuple (Tuple(..), snd, uncurry) import Data.Tuple.Nested (type (/\), (/\)) import Effect.Aff as Aff import Effect.Aff.Class (class MonadAff, liftAff) @@ -32,14 +32,15 @@ import PureScript.CST.Errors as CST.Errors import PureScript.CST.Types as CST.Types import Registry.Location (Location) import Registry.Manifest (Manifest(..)) -import Registry.ManifestIndex (ManifestIndex) -import Registry.ManifestIndex as ManifestIndex import Registry.Metadata (Metadata(..), PublishedMetadata, UnpublishedMetadata) import Registry.Operation (PublishData) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.PursGraph (AssociatedError, ModuleName, PursGraph) +import Registry.PursGraph as PursGraph import Registry.Range (Range) import Registry.Range as Range +import Registry.Solver (CompilerIndex) import Registry.Solver as Solver import Registry.Version (Version) @@ -71,11 +72,63 @@ isNotUnpublished :: Manifest -> Metadata -> Maybe UnpublishedMetadata isNotUnpublished (Manifest { version }) (Metadata { unpublished }) = Map.lookup version unpublished +data ValidateDepsError + = UnusedDependencies (NonEmptySet PackageName) + | MissingDependencies (NonEmptySet PackageName) + | UnusedAndMissing { unused :: NonEmptySet PackageName, missing :: NonEmptySet PackageName } + +derive instance Eq ValidateDepsError + +printValidateDepsError :: ValidateDepsError -> String +printValidateDepsError = case _ of + UnusedDependencies unused -> + "Unused dependencies (" <> printPackages unused <> ")" + MissingDependencies missing -> + "Missing dependencies (" <> printPackages missing <> ")" + UnusedAndMissing { unused, missing } -> + "Unused dependencies (" <> printPackages unused <> ") and missing dependencies (" <> printPackages missing <> ")" + where + printPackages :: NonEmptySet PackageName -> String + printPackages = String.joinWith ", " <<< map PackageName.print <<< NonEmptySet.toUnfoldable + +-- | Verifies that the manifest lists dependencies imported in the source code, +-- | no more (ie. unused) and no less (ie. transitive). The graph passed to this +-- | function should be the output of 'purs graph' executed on the 'output' +-- | directory of the package compiled with its dependencies. 
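+-- |
+-- | A sketch of intended use ('readGraph', 'pathParser', and the handlers are
+-- | assumed helpers, not part of this module): compile the package together
+-- | with its resolved dependencies, run 'purs graph' on the output, then:
+-- |
+-- |   graph <- readGraph "purs-graph.json"
+-- |   case noTransitiveOrMissingDeps manifest graph pathParser of
+-- |     Left (Left associationErrors) -> handleGraphErrors associationErrors
+-- |     Left (Right depsError) -> handleDepsError (printValidateDepsError depsError)
+-- |     Right _ -> pure unit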
+noTransitiveOrMissingDeps :: Manifest -> PursGraph -> (FilePath -> Either String PackageName) -> Either (Either (NonEmptyArray AssociatedError) ValidateDepsError) Unit +noTransitiveOrMissingDeps (Manifest manifest) graph parser = do + associated <- Bifunctor.lmap Left $ PursGraph.associateModules parser graph + + let + packageModules :: Set ModuleName + packageModules = Map.keys $ Map.filter (_ == manifest.name) associated + + directImportModules :: Set ModuleName + directImportModules = PursGraph.directDependenciesOf packageModules graph + + directImportPackages :: Set PackageName + directImportPackages = Set.mapMaybe (flip Map.lookup associated) directImportModules + + -- Unused packages are those which are listed in the manifest dependencies + -- but which are not imported by the package source code. + unusedDependencies :: Set PackageName + unusedDependencies = Set.filter (not <<< flip Set.member directImportPackages) (Map.keys manifest.dependencies) + + -- Missing packages are those which are imported by the package source code + -- but which are not listed in its dependencies. + missingDependencies :: Set PackageName + missingDependencies = Set.filter (not <<< flip Map.member manifest.dependencies) directImportPackages + + case NonEmptySet.fromSet unusedDependencies, NonEmptySet.fromSet missingDependencies of + Nothing, Nothing -> Right unit + Just unused, Nothing -> Left $ Right $ UnusedDependencies unused + Nothing, Just missing -> Left $ Right $ MissingDependencies missing + Just unused, Just missing -> Left $ Right $ UnusedAndMissing { unused, missing } + -- | Verifies that the manifest dependencies are solvable by the registry solver. -validateDependenciesSolve :: Manifest -> ManifestIndex -> Either (NonEmptyList Solver.SolverError) (Map PackageName Version) -validateDependenciesSolve manifest manifestIndex = do - let getDependencies = _.dependencies <<< un Manifest - Solver.solve (map (map getDependencies) (ManifestIndex.toMap manifestIndex)) (getDependencies manifest) +validateDependenciesSolve :: Version -> Manifest -> CompilerIndex -> Either Solver.SolverErrors (Map PackageName Version) +validateDependenciesSolve compiler (Manifest manifest) compilerIndex = + map snd $ Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies -- | Verifies that all dependencies in the manifest are present in the build -- | plan, and the version listed in the build plan is within the range provided @@ -97,23 +150,6 @@ getUnresolvedDependencies (Manifest { dependencies }) resolutions = | not (Range.includes dependencyRange version) -> Just $ Right $ dependencyName /\ dependencyRange /\ version | otherwise -> Nothing --- | Discovers dependencies listed in the manifest that are not actually used --- | by the solved dependencies. This should not produce an error, but it --- | indicates an over-constrained manifest. -getUnusedDependencies :: Manifest -> Map PackageName Version -> Set PackageName -> Maybe (NonEmptySet PackageName) -getUnusedDependencies (Manifest { dependencies }) resolutions discovered = do - let - -- There may be too many resolved dependencies because the manifest includes - -- e.g. test dependencies, so we start by only considering resolved deps - -- that are actually used. 
- inUse = Set.filter (flip Set.member discovered) (Map.keys resolutions) - - -- Next, we can determine which dependencies are unused by looking at the - -- difference between the manifest dependencies and the resolved packages - unused = Set.filter (not <<< flip Set.member inUse) (Map.keys dependencies) - - NonEmptySet.fromSet unused - data TarballSizeResult = ExceedsMaximum Number | WarnPackageSize Number diff --git a/lib/src/PursGraph.purs b/lib/src/PursGraph.purs index 5ed1e512b..d95bff119 100644 --- a/lib/src/PursGraph.purs +++ b/lib/src/PursGraph.purs @@ -79,7 +79,17 @@ associateModules parse graph = do -- | Find direct dependencies of the given module, according to the given graph. directDependencies :: ModuleName -> PursGraph -> Maybe (Set ModuleName) -directDependencies name = map (Set.fromFoldable <<< _.depends) <<< Map.lookup name +directDependencies start graph = Map.lookup start graph <#> \_ -> directDependenciesOf (Set.singleton start) graph + +-- | Find direct dependencies of a set of input modules according to the given +-- | graph, excluding the input modules themselves. +directDependenciesOf :: Set ModuleName -> PursGraph -> Set ModuleName +directDependenciesOf sources graph = do + let + foldFn prev name = case Map.lookup name graph of + Nothing -> prev + Just { depends } -> Set.union prev (Array.foldl (\acc mod -> if Set.member mod sources then acc else Set.insert mod acc) Set.empty depends) + Array.foldl foldFn Set.empty $ Set.toUnfoldable sources -- | Find all dependencies of the given module, according to the given graph, -- | excluding the module itself. diff --git a/lib/src/Range.purs b/lib/src/Range.purs index b5dbdcf59..c0e5e1d45 100644 --- a/lib/src/Range.purs +++ b/lib/src/Range.purs @@ -6,15 +6,15 @@ module Registry.Range , caret - , exact , codec + , exact , greaterThanOrEq , includes , intersect , lessThan + , mk , parse , parser , print , union - , mk ) where import Prelude diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index ac0086c76..929894645 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -5,9 +5,12 @@ import Prelude import Control.Alternative (guard) import Data.Array as Array +import Data.Array.NonEmpty (NonEmptyArray) import Data.Array.NonEmpty as NEA +import Data.Array.NonEmpty as NonEmptyArray import Data.Bifunctor (lmap) import Data.Either (Either(..)) +import Data.Either as Either import Data.Foldable (fold, foldMap, intercalate) import Data.FoldableWithIndex (anyWithIndex, foldMapWithIndex, foldlWithIndex, forWithIndex_) import Data.Functor.App (App(..)) @@ -20,6 +23,7 @@ import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) import Data.Semigroup.Foldable (intercalateMap) +import Data.Semigroup.Foldable as Foldable1 import Data.Set (Set) import Data.Set as Set import Data.Set.NonEmpty (NonEmptySet) @@ -27,6 +31,11 @@ import Data.Set.NonEmpty as NES import Data.Traversable (for, sequence, traverse) import Data.TraversableWithIndex (forWithIndex, traverseWithIndex) import Data.Tuple (Tuple(..), fst, snd) +import Partial.Unsafe as Partial +import Registry.Manifest (Manifest(..)) +import Registry.ManifestIndex (ManifestIndex) +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata (Metadata(..)) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) @@ -39,6 +48,46 @@ import Safe.Coerce (coerce) -- Public API 
-------------------------------------------------------------------------------- +-- | A 'DependencyIndex' enriched to include the compiler versions supported by +-- | each package version as a dependency. +newtype CompilerIndex = CompilerIndex DependencyIndex + +derive instance Newtype CompilerIndex _ + +-- | Associate the compiler versions supported by each package version by +-- | inserting them as a range in the version's dependencies. +buildCompilerIndex :: NonEmptyArray Version -> ManifestIndex -> Map PackageName Metadata -> CompilerIndex +buildCompilerIndex pursCompilers index metadata = CompilerIndex do + let + purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") + + getDependencies (Manifest manifest) = fromMaybe manifest.dependencies do + Metadata { published } <- Map.lookup manifest.name metadata + { compilers } <- Map.lookup manifest.version published + -- Construct a maximal range for the compilers the + -- indicated package version supports. + let + min = Foldable1.minimum compilers + max = Version.bumpPatch $ Foldable1.maximum compilers + pursRange <- Range.mk min max + pure $ Map.insert purs pursRange manifest.dependencies + + newPurs version = Map.singleton purs (Map.singleton version Map.empty) + pursVersions = Array.foldl (\acc compiler -> Map.unionWith Map.union (newPurs compiler) acc) Map.empty (NonEmptyArray.toArray pursCompilers) + dependencyIndex = map (map getDependencies) (ManifestIndex.toMap index) + + Map.unionWith Map.union pursVersions dependencyIndex + +-- | Solve the given dependencies using a dependency index that includes compiler +-- | versions, such that the solution prunes results that would fall outside +-- | a compiler range accepted by all dependencies. +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler pursRange (CompilerIndex index) required = do + let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") + results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } + let pursVersion = Map.lookup purs results + pure $ Tuple pursVersion $ Map.delete purs results + -- | Data from the registry index, listing dependencies for each version of -- | each package type DependencyIndex = Map PackageName (Map Version (Map PackageName Range)) @@ -146,6 +195,7 @@ intersectionFromRange' package range = -------------------------------------------------------------------------------- type SolverErrors = NEL.NonEmptyList SolverError + data SolverError = Conflicts (Map PackageName Intersection) | WhileSolving PackageName (Map Version SolverError) diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs index c37d6875a..18e0863ef 100644 --- a/lib/test/Registry/ManifestIndex.purs +++ b/lib/test/Registry/ManifestIndex.purs @@ -74,8 +74,8 @@ spec = do manifest1 = unsafeManifest "prelude" "1.0.0" [] manifest2 = Newtype.over Manifest (_ { description = Just "My prelude description." 
}) manifest1 index = - ManifestIndex.insert manifest1 ManifestIndex.empty - >>= ManifestIndex.insert manifest2 + ManifestIndex.insert ManifestIndex.ConsiderRanges manifest1 ManifestIndex.empty + >>= ManifestIndex.insert ManifestIndex.ConsiderRanges manifest2 case index of Left errors -> @@ -103,17 +103,20 @@ spec = do tinyIndex :: Array Manifest tinyIndex = [ unsafeManifest "prelude" "1.0.0" [] ] - testIndex { satisfied: tinyIndex, unsatisfied: [] } + testIndex ManifestIndex.ConsiderRanges { satisfied: tinyIndex, unsatisfied: [] } Spec.it "Fails to parse non-self-contained index" do let - satisfied :: Array Manifest - satisfied = + satisfiedStrict :: Array Manifest + satisfiedStrict = [ unsafeManifest "prelude" "1.0.0" [] , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ] - -- It is OK for the version bounds to not exist, although we may - -- choose to make this more strict in the future. - , unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ] + ] + + -- Packages with dependencies that exist, but not at the proper bounds. + satisfiedLoose :: Array Manifest + satisfiedLoose = satisfiedStrict <> + [ unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ] ] unsatisfied :: Array Manifest @@ -121,7 +124,8 @@ spec = do [ unsafeManifest "control" "3.0.0" [ Tuple "tuples" ">=2.0.0 <3.0.0" ] ] - testIndex { satisfied, unsatisfied } + testIndex ManifestIndex.ConsiderRanges { satisfied: satisfiedStrict, unsatisfied } + testIndex ManifestIndex.IgnoreRanges { satisfied: satisfiedLoose, unsatisfied } Spec.it "Parses cyclical but acceptable index" do let @@ -133,7 +137,7 @@ spec = do , unsafeManifest "control" "2.0.0" [] ] - testIndex { satisfied, unsatisfied: [] } + testIndex ManifestIndex.ConsiderRanges { satisfied, unsatisfied: [] } Spec.it "Does not parse unacceptable cyclical index" do let @@ -143,7 +147,7 @@ spec = do , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ] ] - testIndex { satisfied: [], unsatisfied } + testIndex ManifestIndex.ConsiderRanges { satisfied: [], unsatisfied } contextEntry :: String contextEntry = @@ -155,9 +159,10 @@ contextEntry = testIndex :: forall m . 
MonadThrow Error m - => { satisfied :: Array Manifest, unsatisfied :: Array Manifest } + => ManifestIndex.IncludeRanges + -> { satisfied :: Array Manifest, unsatisfied :: Array Manifest } -> m Unit -testIndex { satisfied, unsatisfied } = case ManifestIndex.maximalIndex (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of +testIndex consider { satisfied, unsatisfied } = case ManifestIndex.maximalIndex consider (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of Tuple errors result -> do let { fail: shouldHaveErrors } = diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index eff61e185..08c12d887 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -25,24 +25,31 @@ recordStudio = "published": { "0.1.0": { "bytes": 3438, + "compilers": "0.13.0", "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", "publishedTime": "2021-03-27T10:03:46.000Z", "ref": "v0.1.0" }, "0.2.1": { "bytes": 3365, + "compilers": "0.13.0", "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", "publishedTime": "2022-05-15T10:51:57.000Z", "ref": "v0.2.1" }, "1.0.0": { "bytes": 5155, + "compilers": "0.13.0", "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", "publishedTime": "2022-11-03T17:30:28.000Z", "ref": "v1.0.0" }, "1.0.1": { "bytes": 5635, + "compilers": [ + "0.13.0", + "0.13.1" + ], "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=", "publishedTime": "2022-11-04T12:21:09.000Z", "ref": "v1.0.1" diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index 2e5cb47aa..cf474f103 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -2,6 +2,7 @@ module Test.Registry.Operation.Validation where import Prelude +import Data.Array.NonEmpty as NonEmptyArray import Data.Either (Either(..)) import Data.Either as Either import Data.Foldable (for_) @@ -63,8 +64,9 @@ spec = do now = unsafeDateTime "2022-12-12T12:00:00.000Z" outOfRange = unsafeDateTime "2022-12-10T11:00:00.000Z" inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" + compilers = NonEmptyArray.singleton (unsafeVersion "0.13.0") - publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, ref: "" } + publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } metadata = Metadata { location: defaultLocation diff --git a/lib/test/Registry/Solver.purs b/lib/test/Registry/Solver.purs index bfc0e31b9..a45cf92f9 100644 --- a/lib/test/Registry/Solver.purs +++ b/lib/test/Registry/Solver.purs @@ -7,18 +7,19 @@ import Data.Either (Either(..)) import Data.Foldable (for_) import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList -import Data.Map (Map) +import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map -import Data.Maybe (Maybe(..)) -import Data.Newtype (wrap) +import Data.Maybe (Maybe(..), fromMaybe') +import Data.Newtype (un, wrap) import Data.Semigroup.Foldable (intercalateMap) import Data.Set as Set import Data.Set.NonEmpty as NES import Data.Tuple (Tuple(..)) import Data.Tuple.Nested ((/\)) +import Partial.Unsafe (unsafeCrashWith) import Registry.PackageName as PackageName import Registry.Range as Range -import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), printSolverError, solve) +import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), 
Sourced(..), initializeRegistry, initializeRequired, lowerBound, printSolverError, solve, solveSeed, solveSteps, upperBound) import Registry.Test.Assert as Assert import Registry.Test.Utils (fromRight) import Registry.Types (PackageName, Range, Version) @@ -31,6 +32,11 @@ spec = do shouldSucceed goals result = pure unit >>= \_ -> solve solverIndex (Map.fromFoldable goals) `Assert.shouldContain` (Map.fromFoldable result) + shouldSucceedSteps goals result = pure unit >>= \_ -> do + let solved = solveSteps (solveSeed { registry: initializeRegistry solverIndex, required: initializeRequired (Map.fromFoldable goals) }) + let toRange intersect = fromMaybe' (\_ -> unsafeCrashWith "Bad intersection") (Range.mk (lowerBound intersect) (upperBound intersect)) + map toRange (un SemigroupMap solved.required) `Assert.shouldEqual` Map.fromFoldable result + shouldFail goals errors = pure unit >>= \_ -> case solve solverIndex (Map.fromFoldable goals) of Left solverErrors -> do let expectedErrorCount = Array.length errors @@ -103,6 +109,22 @@ spec = do , prelude.package /\ version 1 ] + Spec.describe "Single-step expands bounds" do + Spec.it "Simple range" do + shouldSucceedSteps + [ simple.package /\ range 0 1 ] + [ simple.package /\ range 0 1, prelude.package /\ range 0 1 ] + + Spec.it "Multi-version range" do + shouldSucceedSteps + [ simple.package /\ range 0 2 ] + [ simple.package /\ range 0 2, prelude.package /\ range 0 2 ] + + Spec.it "Transitive" do + shouldSucceedSteps + [ onlySimple.package /\ range 0 1 ] + [ onlySimple.package /\ range 0 1, simple.package /\ range 0 1, prelude.package /\ range 0 1 ] + Spec.describe "Valid dependency ranges containing some invalid versions solve" do Spec.it "Proceeds past broken ranges to find a later valid range" do -- 'broken-fixed' cannot be solved at the broken version 0, but it can be diff --git a/nix/lib/buildRegistryPackage.nix b/nix/lib/buildRegistryPackage.nix new file mode 100644 index 000000000..af32221e8 --- /dev/null +++ b/nix/lib/buildRegistryPackage.nix @@ -0,0 +1,83 @@ +# Helper function for building registry PureScript executables. Compiles a +# PureScript module to an esbuild-bundled Node.js executable. +# +# Returns a function suitable for callPackage that will be auto-injected with: +# - registry-runtime-deps, registry-package-lock (from overlay) +# - Standard build tools (esbuild, nodejs, etc.) +{ + name, + module, + src, + spagoLock, + description, + extraInstall ? "", +}: +{ + lib, + stdenv, + makeWrapper, + esbuild, + writeText, + nodejs, + registry-runtime-deps, + registry-package-lock, +}: +let + # ESM entrypoint that imports and runs the PureScript main function + entrypoint = writeText "entrypoint.js" '' + import { main } from "./output/${module}"; + main(); + ''; +in +stdenv.mkDerivation { + inherit name src; + + nativeBuildInputs = [ + esbuild + makeWrapper + ]; + + buildInputs = [ nodejs ]; + + meta = { + inherit description; + mainProgram = name; + }; + + buildPhase = '' + runHook preBuild + + # Link dependencies and compiled output + ln -s ${registry-package-lock}/node_modules . + cp -r ${spagoLock}/output . 
+ + # Bundle with esbuild + cp ${entrypoint} entrypoint.js + esbuild entrypoint.js \ + --bundle \ + --outfile=${name}.js \ + --platform=node \ + --packages=external + + runHook postBuild + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + + # Install the bundled JavaScript + cp ${name}.js $out/${name}.js + + # Create wrapper script with runtime dependencies in PATH + makeWrapper ${nodejs}/bin/node $out/bin/${name} \ + --add-flags "$out/${name}.js" \ + --set NODE_PATH "${registry-package-lock}/node_modules" \ + --prefix PATH : "${lib.makeBinPath registry-runtime-deps}" + + ${extraInstall} + + runHook postInstall + ''; +} diff --git a/nix/lib/default.nix b/nix/lib/default.nix new file mode 100644 index 000000000..799ebd8de --- /dev/null +++ b/nix/lib/default.nix @@ -0,0 +1,5 @@ +{ lib }: +{ + parseEnv = import ./parseEnv.nix { inherit lib; }; + buildRegistryPackage = import ./buildRegistryPackage.nix; +} diff --git a/nix/lib/parseEnv.nix b/nix/lib/parseEnv.nix new file mode 100644 index 000000000..9590ecf4f --- /dev/null +++ b/nix/lib/parseEnv.nix @@ -0,0 +1,19 @@ +# Parse a .env file into a Nix attrset, skipping comments and empty lines +{ lib }: + +path: +let + lines = lib.splitString "\n" (builtins.readFile path); + isContent = + line: builtins.match "^[[:space:]]*$" line == null && builtins.match "^#.*$" line == null; + toKeyValue = + line: + let + match = builtins.match "([^=]+)=(.*)" line; + in + { + name = builtins.elemAt match 0; + value = builtins.elemAt match 1; + }; +in +builtins.listToAttrs (map toKeyValue (builtins.filter isContent lines)) diff --git a/nix/overlays/default.nix b/nix/overlays/default.nix new file mode 100644 index 000000000..499e8f3fd --- /dev/null +++ b/nix/overlays/default.nix @@ -0,0 +1,13 @@ +# Registry overlays +{ + purescript-overlay, + mkSpagoDerivation, + registryLib, + spagoSrc, + npmSrc, +}: +[ + purescript-overlay.overlays.default + mkSpagoDerivation.overlays.default + (import ./registry.nix { inherit registryLib spagoSrc npmSrc; }) +] diff --git a/nix/overlays/registry.nix b/nix/overlays/registry.nix new file mode 100644 index 000000000..f5e769303 --- /dev/null +++ b/nix/overlays/registry.nix @@ -0,0 +1,211 @@ +# Registry packages overlay +# +# This overlay provides all the registry server components, tools, and scripts. +# +# Architecture: +# - Apps (in ./app): Server and GitHub importer that share compiled dependencies +# - Scripts (in ./scripts): CLI utilities that depend on the app code +# - Build optimization: Apps share a pre-compiled output (app) since they +# use the same dependencies and source. Scripts also use this to avoid recompiling. +{ + registryLib, + spagoSrc, + npmSrc, +}: +final: prev: +let + # Shared compiled output for all apps. Both registry-server and registry-github-importer + # are built from ./app with the same dependencies, so we compile once and reuse. + # Scripts in ./scripts depend on registry-app, so they also benefit from this cache. + app = prev.stdenv.mkDerivation { + name = "registry-app-shared"; + src = ../../app; + nativeBuildInputs = [ prev.purs-backend-es-unstable ]; + + phases = [ + "buildPhase" + "installPhase" + ]; + + buildPhase = '' + # Link dependencies + ln -s ${final.registry-package-lock}/node_modules . + ln -s ${final.registry-spago-lock}/output . 
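+
+      # NOTE: purs-backend-es reads the spago-built modules from ./output and
+      # writes its optimized ES modules to ./output-es, which the install
+      # phase below copies into $out/output.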
+ + # Compile PureScript to JavaScript using purs-backend-es + purs-backend-es build + ''; + + installPhase = '' + mkdir $out + cp -r output-es $out/output + + # purs-backend-es doesn't copy foreign files, so we need to manually include them + for dir in output/*/; do + subdir=$(basename "$dir") + if [ -f "output/$subdir/foreign.js" ]; then + cp "output/$subdir/foreign.js" "$out/output/$subdir/" + fi + done + ''; + }; + + # Map of script name -> { module, description } + scripts = { + legacy-importer = { + module = "Registry.Scripts.LegacyImporter"; + description = "Import packages from legacy registries (bower, psc-package, etc.)"; + }; + package-deleter = { + module = "Registry.Scripts.PackageDeleter"; + description = "Delete packages from the registry"; + }; + package-set-updater = { + module = "Registry.Scripts.PackageSetUpdater"; + description = "Update package sets"; + }; + package-transferrer = { + module = "Registry.Scripts.PackageTransferrer"; + description = "Transfer packages between storage backends"; + }; + solver = { + module = "Registry.Scripts.Solver"; + description = "Run dependency solver against registry manifests"; + }; + verify-integrity = { + module = "Registry.Scripts.VerifyIntegrity"; + description = "Verify registry and registry-index consistency"; + }; + compiler-versions = { + module = "Registry.Scripts.CompilerVersions"; + description = "List supported compiler versions"; + }; + }; +in +{ + # Use Node.js 20 LTS for all registry components + nodejs = prev.nodejs_20; + + # Pin spago to the version we use + spago = prev.spago-bin.spago-0_93_44; + + # Spago lock: compiled PureScript dependencies for the entire workspace + registry-spago-lock = prev.mkSpagoDerivation { + name = "registry"; + src = spagoSrc; + nativeBuildInputs = [ + final.spago + prev.purescript + ]; + buildPhase = "spago build"; + installPhase = "mkdir $out; cp -r * $out"; + }; + + # NPM lock: JavaScript dependencies (esbuild, node-gyp, etc.) + registry-package-lock = prev.buildNpmPackage { + pname = "purescript-registry"; + version = "0.0.1"; + src = npmSrc; + dontNpmBuild = true; + + nativeBuildInputs = + with prev; + [ + python3 + nodePackages.node-gyp + ] + ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; + + # To update: run `nix build .#server` and copy the hash from the error + npmDepsHash = "sha256-vm6k4DUDWUgPcPeym3YhA1hIg1LbHCDRBSH+7Zs52Uw="; + + installPhase = '' + mkdir -p $out + rm -f node_modules/{registry-app,registry-lib,registry-foreign} + mv node_modules $out/ + ''; + }; + + # Compiler infrastructure + + # All PureScript compilers we support (filtered from purs-bin overlay) + registry-supported-compilers = prev.lib.filterAttrs ( + name: _: builtins.match "^purs-[0-9]+_[0-9]+_[0-9]+$" name != null + ) prev.purs-bin; + + # Executable directory containing all supported compiler versions + registry-compilers = prev.symlinkJoin { + name = "purs-compilers"; + paths = prev.lib.mapAttrsToList ( + name: drv: prev.writeShellScriptBin name ''exec ${drv}/bin/purs "$@"'' + ) final.registry-supported-compilers; + }; + + # Script that prints all supported compiler versions (space-separated) + registry-purs-versions = prev.writeShellScriptBin "purs-versions" '' + echo ${ + prev.lib.concatMapStringsSep " " ( + x: prev.lib.removePrefix "purs-" (builtins.replaceStrings [ "_" ] [ "." 
] x) + ) (prev.lib.attrNames final.registry-supported-compilers) + } + ''; + + # Runtime dependencies needed by all registry executables + # Used in: buildRegistryPackage, spago-test check, and devShell + registry-runtime-deps = with prev; [ + final.registry-compilers + final.registry-purs-versions + + dhall + dhall-json + licensee + git + git-lfs + coreutils + gzip + gnutar + ]; + + # Applications + + registry-server = prev.callPackage (registryLib.buildRegistryPackage { + name = "registry-server"; + module = "Registry.App.Server"; + description = "PureScript Registry API server"; + src = ../../app; + spagoLock = app; + extraInstall = "cp -r ${../../db} $out/bin/db"; + }) { }; + + registry-github-importer = prev.callPackage (registryLib.buildRegistryPackage { + name = "registry-github-importer"; + module = "Registry.App.GitHubIssue"; + description = "Import packages from GitHub issues"; + src = ../../app; + spagoLock = app; + }) { }; + + # Scripts - generated from the scripts attrset with module and description +} +// prev.lib.mapAttrs' ( + name: info: + prev.lib.nameValuePair "registry-${name}" ( + prev.callPackage (registryLib.buildRegistryPackage { + name = "registry-${name}"; + module = info.module; + description = info.description; + src = ../../scripts/src; + spagoLock = final.registry-spago-lock; + }) { } + ) +) scripts +// { + # Convenience namespace for bulk access to apps and scripts + registry = { + apps = { + server = final.registry-server; + github-importer = final.registry-github-importer; + }; + scripts = prev.lib.mapAttrs (name: _: final."registry-${name}") scripts; + }; +} diff --git a/nix/module.nix b/nix/registry-server.nix similarity index 54% rename from nix/module.nix rename to nix/registry-server.nix index 70ca42e28..92f301832 100644 --- a/nix/module.nix +++ b/nix/registry-server.nix @@ -6,6 +6,28 @@ }: let cfg = config.services.registry-server; + + # Convert env vars attrset to .env file format + envFile = pkgs.writeText ".env" ( + lib.concatStringsSep "\n" (lib.mapAttrsToList (k: v: "${k}=${toString v}") cfg.envVars) + ); + + serverInit = pkgs.writeShellScriptBin "registry-server-init" '' + mkdir -p ${cfg.stateDir}/db + + set -o allexport + source ${envFile} + [ -f ${cfg.stateDir}/.env ] && source ${cfg.stateDir}/.env + set +o allexport + + export DATABASE_URL="sqlite:${cfg.stateDir}/db/registry.sqlite3" + + cd ${pkgs.registry-server}/bin + ${pkgs.dbmate}/bin/dbmate up + + cd ${cfg.stateDir} + exec ${pkgs.registry-server}/bin/registry-server + ''; in { options.services.registry-server = { @@ -26,13 +48,13 @@ in stateDir = lib.mkOption { type = lib.types.str; default = "/var/lib/registry-server"; - description = "The directory to store the registry server state (database, etc.)"; + description = "The directory to store the registry server state"; }; enableCerts = lib.mkOption { type = lib.types.bool; default = true; - description = "Whether to enable Let's Encrypt certificates for the registry server"; + description = "Whether to enable Let's Encrypt certificates"; }; envVars = lib.mkOption { @@ -40,23 +62,20 @@ in lib.types.either lib.types.str (lib.types.either lib.types.int lib.types.path) ); default = { }; - description = "Environment variables to set for the registry server"; + description = "Environment variables for the registry server"; }; }; config = lib.mkIf cfg.enable { - environment = { - systemPackages = [ - pkgs.vim - pkgs.git - ]; - }; + environment.systemPackages = [ + pkgs.vim + pkgs.git + ]; nix = { gc.automatic = true; settings = { 
auto-optimise-store = true; - # https://garnix.io/docs/caching substituters = [ "https://cache.garnix.io" ]; trusted-public-keys = [ "cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g=" ]; }; @@ -73,75 +92,37 @@ in users = { mutableUsers = false; - users = let deployers = import ./deployers.nix; in - pkgs.lib.mapAttrs (user: attrs: { + lib.mapAttrs (user: attrs: { isNormalUser = true; home = "/home/${user}"; extraGroups = [ "wheel" ]; - packages = [ - pkgs.rsync - pkgs.git - pkgs.curl - pkgs.coreutils - pkgs.vim + packages = with pkgs; [ + rsync + git + curl + coreutils + vim ]; openssh.authorizedKeys.keys = attrs.sshKeys; }) deployers; }; - systemd.services = - let - # Print an attrset of env vars { ENV_VAR = "value"; } as a newline-delimited - # string of "ENV_VAR=value" lines, then write the text to the Nix store. - printEnv = - vars: - pkgs.lib.concatStringsSep "\n" ( - pkgs.lib.mapAttrsToList ( - name: value: - if (builtins.typeOf value == "int") then "${name}=${toString value}" else "${name}=${value}" - ) vars - ); - defaultEnvFile = pkgs.writeText ".env" (printEnv cfg.envVars); - in - { - server = { - description = "registry server"; - wantedBy = [ - "multi-user.target" - "nginx.service" - ]; - serviceConfig = { - ExecStart = "${pkgs.writeShellScriptBin "registry-server-init" '' - # Ensure the state directory is available and initialize the database - mkdir -p ${cfg.stateDir}/db - - # Initialize environment variables - set -o allexport - source ${defaultEnvFile} - - # If a .env file exists in the stateDir then we will use it instead; - # this overwrites the cfg.envVars settings. - if [ -f ${cfg.stateDir}/.env ]; then - echo "Production .env file found! Values will overwrite the defaults." - source ${cfg.stateDir}/.env - fi - set +o allexport - - export DATABASE_URL="sqlite:${cfg.stateDir}/db/registry.sqlite3" - pushd ${pkgs.registry.apps.server}/bin - ${pkgs.dbmate}/bin/dbmate up - popd - - echo "Starting registry server..." - ${pkgs.registry.apps.server}/bin/registry-server - ''}/bin/registry-server-init"; - }; - }; + systemd.services.server = { + description = "registry server"; + wantedBy = [ + "multi-user.target" + "nginx.service" + ]; + serviceConfig = { + ExecStart = "${serverInit}/bin/registry-server-init"; + Type = "simple"; + Restart = "always"; }; + }; swapDevices = [ { @@ -178,11 +159,7 @@ in PureScript Registry - +
[static HTML landing page elided: both the previous and the new nginx config serve a minimal page titled "PureScript Registry"]
@@ -192,9 +169,7 @@ in ''; }; - locations."/api" = { - proxyPass = "http://127.0.0.1:${toString cfg.port}"; - }; + locations."/api".proxyPass = "http://127.0.0.1:${toString cfg.port}"; }; }; }; diff --git a/nix/test-vm.nix b/nix/test-vm.nix deleted file mode 100644 index 916866579..000000000 --- a/nix/test-vm.nix +++ /dev/null @@ -1,37 +0,0 @@ -# Machine configuration for the NixOS virtual machine suitable for testing. -{ - lib, - pkgs, - modulesPath, - ... -}: -{ - imports = [ - "${modulesPath}/virtualisation/qemu-vm.nix" - ./module.nix - ]; - - config = { - # https://github.com/utmapp/UTM/issues/2353 - networking.nameservers = lib.mkIf pkgs.stdenv.isDarwin [ "8.8.8.8" ]; - - # NOTE: Use 'shutdown now' to exit the VM. - services.getty.autologinUser = "root"; - - virtualisation = { - graphics = false; - host = { - inherit pkgs; - }; - forwardPorts = [ - { - from = "host"; - guest.port = 80; - host.port = 8080; - } - ]; - }; - - system.stateVersion = "23.11"; - }; -} diff --git a/nix/test/git-mock.mjs b/nix/test/git-mock.mjs new file mode 100644 index 000000000..df8068972 --- /dev/null +++ b/nix/test/git-mock.mjs @@ -0,0 +1,53 @@ +#!/usr/bin/env node + +/* + +Mock git binary for testing. Detects arguments to 'git' containing a URL +and replaces them with a local filepath. This is a drop-in replacement +for 'git' that should be used in offline / test environments when we only +want fixture data. + +*/ + +import { spawn } from "node:child_process"; + +const repoFixturesDir = process.env.REPO_FIXTURES_DIR; +if (!repoFixturesDir) { + throw new Error("REPO_FIXTURES_DIR is not set, but is required."); +} + +const gitBinary = process.env.GIT_BINARY; +if (!gitBinary) { + throw new Error("GIT_BINARY is not set, but is required."); +} + +// Replace any URL arguments with the local fixtures path. +function replaceIfUrl(arg) { + try { + const url = new URL(arg); + const path = url.pathname.replace(/\.git$/, ""); + const file = "file://" + repoFixturesDir + path; + console.log(file); + return file; + } catch (e) { + // Not a URL, ignore + } + return arg; +} + +const args = process.argv.slice(2); +const modified = args.map(replaceIfUrl); + +const git = spawn(gitBinary, modified); + +git.stdout.on("data", (data) => { + console.log(data.toString("utf8")); +}); + +git.stderr.on("data", (data) => { + console.error(data.toString("utf8")); +}); + +git.on("close", (code) => { + process.exit(code); +}); diff --git a/nix/test/integration.nix b/nix/test/integration.nix new file mode 100644 index 000000000..cc3309981 --- /dev/null +++ b/nix/test/integration.nix @@ -0,0 +1,394 @@ +# VM-based integration test for the registry server. This test deploys the actual service +# to a NixOS VM that matches our deploy environment, and then executes the core publishing +# workflow. The registry relies on several external services and tools that we don't +# control, so the APIs are mocked with WireMock and the Git commands are mocked with a +# wrapper CLI tool called `git-mock`. +# +# The integration test is set up such that the `prelude` package is already published to +# the registry, and the user is now publishing the `effect` package. This can be seen in +# the WireMock and Git fixture setup below. 
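+#
+# The test runs as a flake check: per CONTRIBUTING.md it can be built with
+# `nix build .#checks.x86_64-linux.integration` (Linux only; macOS falls
+# through to the skip case below).
+#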
+{ + pkgs, + lib, + overlays, + rootPath, +}: + +if pkgs.stdenv.isDarwin then + pkgs.runCommand "integration-skip" { } '' + echo "Integration tests require Linux VMs, skipping on macOS" > $out + '' +else + let + # Port configuration - single source of truth + ports = { + server = 8080; + github = 9001; + bucket = 9002; + s3 = 9003; + pursuit = 9004; + }; + + stateDir = "/var/lib/registry-server"; + + # Git mock that redirects URLs to local fixtures; this is necessary because otherwise + # commands would reach out to GitHub or the other package origins. + gitMock = pkgs.writeShellScriptBin "git" '' + export GIT_BINARY="${pkgs.git}/bin/git" + exec ${pkgs.nodejs}/bin/node ${./git-mock.mjs} "$@" + ''; + + # WireMock NixOS module to make it easy to mock HTTP services the registry depends on. + wiremockModule = + { service }: + { + pkgs, + config, + lib, + ... + }: + let + cfg = config.services."wiremock-${service}"; + mappingsFormat = pkgs.formats.json { }; + rootDir = + let + mappingsJson = mappingsFormat.generate "mappings.json" { mappings = cfg.mappings; }; + in + pkgs.runCommand "wiremock-root" { } '' + mkdir -p $out/{mappings,__files} + cp ${mappingsJson} $out/mappings/mappings.json + ${lib.concatMapStrings (f: "cp ${f.path} $out/__files/${f.name}\n") cfg.files} + ''; + in + { + options.services."wiremock-${service}" = { + enable = lib.mkEnableOption "WireMock"; + port = lib.mkOption { + type = lib.types.int; + default = 8080; + }; + files = lib.mkOption { + type = lib.types.listOf ( + lib.types.submodule { + options = { + name = lib.mkOption { type = lib.types.str; }; + path = lib.mkOption { type = lib.types.path; }; + }; + } + ); + default = [ ]; + }; + mappings = lib.mkOption { + type = mappingsFormat.type; + default = [ ]; + }; + }; + + config = lib.mkIf cfg.enable { + systemd.services."wiremock-${service}" = { + description = "WireMock ${service}"; + wantedBy = [ "multi-user.target" ]; + serviceConfig = { + ExecStart = "${pkgs.wiremock}/bin/wiremock --port ${toString cfg.port} --root-dir ${rootDir} --disable-banner"; + Type = "simple"; + }; + }; + }; + }; + + parseEnv = import ../lib/parseEnv.nix { inherit lib; }; + envVars = parseEnv (rootPath + "/.env.example") // { + GITHUB_API_URL = "http://localhost:${toString ports.github}"; + S3_API_URL = "http://localhost:${toString ports.s3}"; + S3_BUCKET_URL = "http://localhost:${toString ports.bucket}"; + PURSUIT_API_URL = "http://localhost:${toString ports.pursuit}"; + REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; + }; + + setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' + set -e + mkdir -p ${stateDir}/repo-fixtures/purescript + git config --global user.email "pacchettibotti@purescript.org" + git config --global user.name "pacchettibotti" + git config --global init.defaultBranch "master" + + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} ${stateDir}/repo-fixtures/purescript/ + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 ${stateDir}/repo-fixtures/purescript/purescript-effect + + for repo in ${stateDir}/repo-fixtures/purescript/*/; do + cd "$repo" + git init && git add . 
&& git commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + done + + git -C ${stateDir}/repo-fixtures/purescript/package-sets tag -m "psc-0.15.4-20230105" psc-0.15.4-20230105 + git -C ${stateDir}/repo-fixtures/purescript/purescript-effect tag -m "v4.0.0" v4.0.0 + ''; + + publishPayload = pkgs.writeText "publish-effect.json" ( + builtins.toJSON { + name = "effect"; + ref = "v4.0.0"; + compiler = "0.15.4"; + location = { + githubOwner = "purescript"; + githubRepo = "purescript-effect"; + }; + } + ); + in + pkgs.testers.nixosTest { + name = "registry-integration"; + + testScript = '' + import json + import time + + # Start registry and set up git fixtures + registry.start() + registry.succeed("${setupGitFixtures}/bin/setup-git-fixtures") + + # Wait for all services to be ready + registry.wait_for_unit("wiremock-github-api.service") + registry.wait_for_unit("wiremock-s3-api.service") + registry.wait_for_unit("wiremock-bucket-api.service") + registry.wait_for_unit("wiremock-pursuit-api.service") + registry.wait_for_unit("server.service") + + # Start client and wait for API + client.start() + client.wait_until_succeeds( + "curl --fail-with-body http://registry/api/v1/jobs", + timeout=20 + ) + + # Publish a package + result = json.loads(client.succeed( + "curl -s -X POST -d @${publishPayload} -H 'Content-Type: application/json' " + "http://registry/api/v1/publish" + )) + + job_id = result["jobId"] + assert len(job_id) == 36, f"Expected job ID, got: {result}" + print(f"Job created: {job_id}") + + # Poll for completion + for attempt in range(20): + time.sleep(3) + poll = json.loads(client.succeed( + f"curl -s 'http://registry/api/v1/jobs/{job_id}" + "?since=2023-01-01T00:00:00Z&level=DEBUG'" + )) + + if "finishedAt" in poll: + assert poll["success"], f"Job failed: {poll}" + print("✓ Job completed successfully") + break + else: + raise Exception("Job did not complete in time") + ''; + + # This section defines the machine, configuring the Wiremock instances to + # mock external APIs, overriding Git with the mocked version, and setting + # up the actual Wiremock data to return. The machine is based on the + # same registry-server Nix module we deploy. + nodes.client.virtualisation.graphics = false; + nodes.registry = { + imports = [ + (wiremockModule { service = "github-api"; }) + (wiremockModule { service = "s3-api"; }) + (wiremockModule { service = "bucket-api"; }) + (wiremockModule { service = "pursuit-api"; }) + (rootPath + "/nix/registry-server.nix") + ]; + + # We replace Git in registry-runtime-deps with our custom mocked Git which + # prevents reaching out over the network. We override registry-runtime-deps + # to substitute the mock, which causes registry-server to be rebuilt with it. + nixpkgs.overlays = overlays ++ [ + (_: prev: { + registry-runtime-deps = map ( + pkg: if pkg == prev.git then gitMock else pkg + ) prev.registry-runtime-deps; + }) + ]; + + virtualisation.graphics = false; + + # Finally, we define the running services on the machine: the registry, + # and then the various wiremock servers. 
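+      # Note: the server reads the same variables a production deployment
+      # takes from its .env file, except here they are parsed from
+      # .env.example with the external service URLs rewritten to the local
+      # WireMock ports defined above.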
+ services.registry-server = { + enable = true; + host = "localhost"; + port = ports.server; + enableCerts = false; + inherit stateDir envVars; + }; + + # GitHub API mock - returns base64-encoded content like the real API + services.wiremock-github-api = { + enable = true; + port = ports.github; + mappings = + let + # Helper to create GitHub contents API response, as it returns base64-encoded content + base64Response = + { + url, + fileName, + filePath, + }: + { + request = { + method = "GET"; + inherit url; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + type = "file"; + encoding = "base64"; + name = fileName; + path = fileName; + # Base64 encode the file content using Nix builtins + content = builtins.readFile ( + pkgs.runCommand "base64-${fileName}" { } '' + base64 -w 0 ${filePath} > $out + '' + ); + }; + }; + }; + + effectBase64Response = + fileName: + base64Response { + url = "/repos/purescript/purescript-effect/contents/${fileName}?ref=v4.0.0"; + fileName = fileName; + filePath = rootPath + "/app/fixtures/github-packages/effect-4.0.0/${fileName}"; + }; + in + [ + (effectBase64Response "bower.json") + (effectBase64Response "LICENSE") + + { + request = { + method = "GET"; + url = "/repos/purescript/package-sets/tags"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + name = "psc-0.15.10-20230105"; + commit = { + sha = "090897c992b2b310b1456506308db789672adac1"; + url = "https://api.github.com/repos/purescript/package-sets/commits/090897c992b2b310b1456506308db789672adac1"; + }; + }; + }; + } + ]; + }; + + # S3 API mock - serves package tarballs + services.wiremock-s3-api = { + enable = true; + port = ports.s3; + files = [ + { + name = "prelude-6.0.1.tar.gz"; + path = rootPath + "/app/fixtures/registry-storage/prelude-6.0.1.tar.gz"; + } + { + name = "type-equality-4.0.1.tar.gz"; + path = rootPath + "/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz"; + } + ]; + mappings = [ + { + request = { + method = "GET"; + url = "/prelude/6.0.1.tar.gz"; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = "prelude-6.0.1.tar.gz"; + }; + } + { + request = { + method = "GET"; + url = "/type-equality/4.0.1.tar.gz"; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = "type-equality-4.0.1.tar.gz"; + }; + } + ]; + }; + + # S3 Bucket API mock - handles upload/list operations + services.wiremock-bucket-api = { + enable = true; + port = ports.bucket; + mappings = [ + { + request.method = "GET"; + response = { + status = 200; + body = ''prelude/6.0.1.tar.gz16298"abc123"type-equality/4.0.1.tar.gz2184"def456"''; + }; + } + { + request = { + method = "PUT"; + url = "/effect/4.0.0.tar.gz?x-id=PutObject"; + }; + response = { + status = 200; + body = ''"abc123"''; + }; + } + { + request = { + method = "PUT"; + url = "/prelude/6.0.1.tar.gz?x-id=PutObject"; + }; + response.status = 500; + } + ]; + }; + + # Pursuit API mock - documentation hosting + services.wiremock-pursuit-api = { + enable = true; + port = ports.pursuit; + mappings = [ + { + request = { + method = "GET"; + url = "/packages/purescript-prelude/available-versions"; + }; + response = { + status = 200; + body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; + }; + } + { + request = { + method = "POST"; + url = "/packages"; + }; + response.status = 201; + } + ]; + }; + }; + + } diff --git 
a/nix/wiremock.nix b/nix/wiremock.nix deleted file mode 100644 index 3db525674..000000000 --- a/nix/wiremock.nix +++ /dev/null @@ -1,114 +0,0 @@ -{ service }: -{ - pkgs, - config, - lib, - ... -}: -with lib; -let - cfg = config.services."wiremock-${service}"; - mappingsFormat = pkgs.formats.json { }; - rootDir = - let - mappingsJson = mappingsFormat.generate "mappings.json" { mappings = cfg.mappings; }; - in - pkgs.runCommand "wiremock-root" - { - preferLocalBuild = true; - allowSubstitutes = false; - } - '' - mkdir -p $out - cd $out - - mkdir mappings - cp ${mappingsJson} mappings/mappings.json - - mkdir __files - ${lib.concatMapStrings (attrs: "cp ${attrs.path} __files/${attrs.name}") cfg.files} - ''; -in -{ - options.services."wiremock-${service}" = { - enable = mkEnableOption "WireMock"; - - port = mkOption { - type = types.int; - default = 8080; - }; - - verbose = mkOption { - type = types.bool; - default = false; - }; - - files = mkOption { - description = '' - List of files to include in the __files directory for access when stubbing. - ''; - default = [ ]; - example = { - name = "file-name.json"; - path = ""; - }; - }; - - mappings = mkOption { - type = mappingsFormat.type; - description = '' - See the for more information. - ''; - default = [ ]; - example = [ - { - request = { - method = "GET"; - url = "/body"; - }; - response = { - status = 200; - headers."Content-Type" = "text/plain"; - body = "Literal text to put in the body"; - }; - } - { - request = { - method = "GET"; - url = "/json"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - someField = "someValue"; - }; - }; - } - ]; - }; - }; - - config = mkIf cfg.enable { - systemd.services."wiremock-${service}" = - let - arguments = [ - "--port ${toString cfg.port}" - "--root-dir ${rootDir}" - "--disable-banner" - ] ++ lib.optional cfg.verbose "--verbose"; - in - { - description = "registry server"; - wantedBy = [ - "multi-user.target" - "nginx.service" - ]; - serviceConfig = { - ExecStart = "${pkgs.writeShellScriptBin "wiremock-${service}-init" '' - ${pkgs.wiremock}/bin/wiremock ${lib.concatStringsSep " " arguments} "$@" - ''}/bin/wiremock-${service}-init"; - }; - }; - }; -} diff --git a/package-lock.json b/package-lock.json index 93959c062..69c25275e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,10 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } }, "app": { "name": "registry-app", @@ -1598,6 +1601,12 @@ "node": ">=14.0.0" } }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -1648,6 +1657,15 @@ "prebuild-install": "^7.1.1" } }, + "node_modules/big-integer": { + "version": "1.6.52", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", + "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", + "license": "Unlicense", + "engines": { + "node": ">=0.6" + } + }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1719,6 +1737,21 @@ "node": ">=10.0.0" } }, + "node_modules/bundle-name": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "license": "MIT", + "dependencies": { + "run-applescript": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -1741,6 +1774,20 @@ "node": ">=10.0.0" } }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -1763,6 +1810,52 @@ "node": ">=4.0.0" } }, + "node_modules/default-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "license": "MIT", + "dependencies": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": "^7.1.1", + "titleize": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "license": "MIT", + "dependencies": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", @@ -1784,6 +1877,50 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "license": "BSD-2-Clause", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", + "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa": { + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -1900,6 +2037,18 @@ "node": ">=10" } }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -1921,6 +2070,15 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, + "node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=14.18.0" + } + }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -1950,6 +2108,21 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -1969,6 +2142,24 @@ "node": ">=0.10.0" } }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -1985,6 +2176,51 @@ "node": ">=0.10.0" } }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-wsl/node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -2004,6 +2240,15 @@ "jsonrepair": "bin/cli.js" } }, + "node_modules/linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "license": "MIT", + "dependencies": { + "uc.micro": "^1.0.1" + } + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -2015,6 +2260,34 @@ "node": ">=10" } }, + "node_modules/markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", + "license": "MIT" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "license": "MIT" + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -2035,6 +2308,18 @@ "node": ">=8.6" } }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, 
"node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -2142,6 +2427,33 @@ } } }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -2195,6 +2507,15 @@ "once": "^1.3.1" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -2322,6 +2643,33 @@ "node": ">=10" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -2365,6 +2713,34 @@ "simple-concat": "^1.0.0" } }, + "node_modules/spago": { + "version": "0.93.19", + "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz", + "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==", + "license": "BSD-3-Clause", + "dependencies": { + "better-sqlite3": "^8.6.0", + "env-paths": "^3.0.0", + "fast-glob": "^3.2.11", + "fs-extra": "^10.0.0", + "fuse.js": "^6.5.3", + "glob": "^7.1.6", + "markdown-it": "^12.0.4", + "open": "^9.1.0", + "punycode": "^2.3.0", + "semver": "^7.3.5", + "spdx-expression-parse": "^3.0.1", + "ssh2": "^1.14.0", + "supports-color": "^9.2.3", + "tar": "^6.1.11", + "tmp": "^0.2.1", + "xhr2": "^0.2.1", + "yaml": "^2.1.1" + }, + "bin": { + "spago": "bin/bundle.js" + } + }, 
"node_modules/spdx-exceptions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", @@ -2409,6 +2785,18 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -2422,6 +2810,18 @@ "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" }, + "node_modules/supports-color": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz", + "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", @@ -2469,6 +2869,18 @@ "node": ">=6" } }, + "node_modules/titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tmp": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", @@ -2515,6 +2927,12 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "license": "MIT" + }, "node_modules/universal-user-agent": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", @@ -2528,6 +2946,15 @@ "node": ">= 10.0.0" } }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -2555,11 +2982,35 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, 
"node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "node_modules/xhr2": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz", + "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index 76bc4e96e..5066e42c0 100644 --- a/package.json +++ b/package.json @@ -6,5 +6,8 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } } diff --git a/run-local-server.sh b/run-local-server.sh new file mode 100755 index 000000000..183fd6c3f --- /dev/null +++ b/run-local-server.sh @@ -0,0 +1,183 @@ +#!/usr/bin/env bash + +# Script to run the PureScript Registry server locally without VM. +# +# The script starts a bunch of external service mocks using WireMock, creates a +# Sqlite DB (if doesn't exist yet) and sets up overriding .env file pointing to +# those mock services and the DB. All of that is kept under `.temp/local-server/`. +# To reset the environment, nuke that directory. + +set -euo pipefail + +# Configuration +MOCK_GITHUB_PORT=9001 +MOCK_BUCKET_PORT=9002 +MOCK_S3_PORT=9003 +MOCK_PURSUIT_PORT=9004 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PIDS=() +CACHE_DIR="$SCRIPT_DIR/.temp/local-server" +mkdir -p "$CACHE_DIR" + +# Cleanup function +cleanup() { + for pid in "${PIDS[@]}"; do + if kill -0 "$pid" 2>/dev/null; then + kill "$pid" 2>/dev/null || true + fi + done + + echo "Killed all mock services." +} + +# Set up cleanup trap +trap cleanup EXIT INT TERM + +# Function to start a wiremock service +start_wiremock() { + local service_name=$1 + local port=$2 + local mappings=$3 + local service_dir="$CACHE_DIR/wiremock-$service_name" + mkdir -p "$service_dir/mappings" "$service_dir/__files" + echo > "$service_dir/mappings/mappings.json" "$mappings" + cp "$SCRIPT_DIR/app/fixtures/registry-storage"/*.tar.gz "$service_dir/__files/" 2>/dev/null || true + + # Start wiremock + nix run nixpkgs#wiremock -- \ + --port "$port" \ + --root-dir "$service_dir" \ + --disable-banner \ + --verbose & + + local pid=$! 
+ PIDS+=("$pid") +} + +mkdir -p "$CACHE_DIR/repo-fixtures/purescript" +cp -r "$SCRIPT_DIR/app/fixtures/github-packages"/* "$CACHE_DIR/repo-fixtures/purescript/" 2>/dev/null || true +cp -r "$SCRIPT_DIR/app/fixtures/registry-index" "$CACHE_DIR/repo-fixtures/purescript/" 2>/dev/null || true +cp -r "$SCRIPT_DIR/app/fixtures/package-sets" "$CACHE_DIR/repo-fixtures/purescript/" 2>/dev/null || true + +start_wiremock "github-api" $MOCK_GITHUB_PORT ' +{ + "mappings": [ + { + "request": { + "method": "GET", + "url": "/repos/purescript/purescript-effect/contents/bower.json?ref=v4.0.0" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json" + }, + "jsonBody": { + "type": "file", + "encoding": "base64", + "content": "ewogICJuYW1lIjogInB1cmVzY3JpcHQtZWZmZWN0IiwKICAiaG9tZXBhZ2UiOiAiaHR0cHM6Ly9naXRodWIuY29tL3B1cmVzY3JpcHQvcHVyZXNjcmlwdC1lZmZlY3QiLAogICJsaWNlbnNlIjogIkJTRC0zLUNsYXVzZSIsCiAgInJlcG9zaXRvcnkiOiB7CiAgICAidHlwZSI6ICJnaXQiLAogICAgInVybCI6ICJodHRwczovL2dpdGh1Yi5jb20vcHVyZXNjcmlwdC9wdXJlc2NyaXB0LWVmZmVjdC5naXQiCiAgfSwKICAiaWdub3JlIjogWwogICAgIioqLy4qIiwKICAgICJib3dlcl9jb21wb25lbnRzIiwKICAgICJub2RlX21vZHVsZXMiLAogICAgIm91dHB1dCIsCiAgICAidGVzdCIsCiAgICAiYm93ZXIuanNvbiIsCiAgICAicGFja2FnZS5qc29uIgogIF0sCiAgImRlcGVuZGVuY2llcyI6IHsKICAgICJwdXJlc2NyaXB0LXByZWx1ZGUiOiAiXjYuMC4wIgogIH0KfQo=" + } + } + }, + { + "request": { + "method": "GET", + "url": "/repos/purescript/package-sets/tags" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json" + }, + "jsonBody": { + "name": "psc-0.15.10-20230105", + "commit": { + "sha": "090897c992b2b310b1456506308db789672adac1", + "url": "https://api.github.com/repos/purescript/package-sets/commits/090897c992b2b310b1456506308db789672adac1" + } + } + } + } + ] +}' + +start_wiremock "s3-api" $MOCK_S3_PORT ' +{ + "mappings": [ + { + "request": { + "method": "GET", + "url": "/prelude/6.0.1.tar.gz" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/octet-stream" + }, + "bodyFileName": "prelude-6.0.1.tar.gz" + } + } + ] +}' + +start_wiremock "bucket-api" $MOCK_BUCKET_PORT ' +{ + "mappings": [ + { + "request": { + "method": "GET", + "urlPattern": "/.*" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/xml" + }, + "body": "" + } + } + ] +}' + +start_wiremock "pursuit-api" $MOCK_PURSUIT_PORT ' +{ + "mappings": [ + { + "request": { + "method": "POST", + "urlPattern": "/packages.*" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json" + }, + "jsonBody": { + "success": true + } + } + } + ] +}' + +if [ ! -f "$SCRIPT_DIR/.env" ]; then + cp "$SCRIPT_DIR/.env.example" "$SCRIPT_DIR/.env" +fi + +if [ ! -f "$CACHE_DIR/registry.sqlite3" ]; then + sqlite3 "$CACHE_DIR/registry.sqlite3" < "$SCRIPT_DIR/db/schema.sql" +fi + + cat > "$CACHE_DIR/.env.local" <<-END +DATABASE_URL="sqlite:$CACHE_DIR/registry.sqlite3" +DHALL_TYPES="$SCRIPT_DIR/types" +GITHUB_API_URL=http://localhost:$MOCK_GITHUB_PORT +S3_API_URL=http://localhost:$MOCK_S3_PORT +S3_BUCKET_URL=http://localhost:$MOCK_BUCKET_PORT +PURSUIT_API_URL=http://localhost:$MOCK_PURSUIT_PORT +END + +# Using a specific version of Spago until the new lockfile structure is +# supported by the PureScript Nix overlay. 
+npx --yes spago@0.93.19 run -p registry-app diff --git a/scripts/default.nix b/scripts/default.nix deleted file mode 100644 index 43c043d15..000000000 --- a/scripts/default.nix +++ /dev/null @@ -1,82 +0,0 @@ -{ - makeWrapper, - lib, - stdenv, - esbuild, - nodejs, - writeText, - compilers, - purs-versions, - dhall, - dhall-json, - licensee, - git, - git-lfs, - coreutils, - gzip, - gnutar, - # from the registry at the top level - spago-lock, - package-lock, -}: -let - build-script = - name: module: - stdenv.mkDerivation rec { - inherit name; - src = ./src; - nativeBuildInputs = [ - esbuild - makeWrapper - ]; - buildInputs = [ nodejs ]; - entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.Scripts.${module}"; - main(); - ''; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - ln -s ${spago-lock}/output . - cp ${entrypoint} entrypoint.js - esbuild entrypoint.js --bundle --outfile=${name}.js --platform=node --packages=external - ''; - installPhase = '' - mkdir -p $out/bin - - echo "Copying files..." - cp ${name}.js $out/${name}.js - ln -s ${package-lock}/js/node_modules $out - - echo "Creating wrapper script..." - echo '#!/usr/bin/env sh' > $out/bin/${name} - echo 'exec ${nodejs}/bin/node '"$out/${name}.js"' "$@"' >> $out/bin/${name} - chmod +x $out/bin/${name} - ''; - postFixup = '' - wrapProgram $out/bin/${name} \ - --set PATH ${ - lib.makeBinPath [ - compilers - purs-versions - dhall - dhall-json - licensee - git - git-lfs - coreutils - gzip - gnutar - ] - } - ''; - }; -in -{ - legacy-importer = build-script "registry-legacy-importer" "LegacyImporter"; - package-deleter = build-script "registry-package-deleter" "PackageDeleter"; - package-set-updater = build-script "registry-package-set-updater" "PackageSetUpdater"; - package-transferrer = build-script "registry-package-transferrer" "PackageTransferrer"; - solver = build-script "registry-solver" "Solver"; - verify-integrity = build-script "registry-verify-integrity" "VerifyIntegrity"; - compiler-versions = build-script "registry-compiler-versions" "CompilerVersions"; -} diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index a9f0079b5..0fdc94a06 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -12,26 +12,37 @@ import ArgParse.Basic (ArgParser) import ArgParse.Basic as Arg import Control.Apply (lift2) import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Compactable (separate) +import Data.DateTime (Date, Month(..)) +import Data.DateTime as DateTime +import Data.Enum (toEnum) import Data.Exists as Exists import Data.Filterable (partition) import Data.Foldable (foldMap) import Data.Foldable as Foldable import Data.Formatter.DateTime as Formatter.DateTime +import Data.Function (on) import Data.FunctorWithIndex (mapWithIndex) import Data.List as List +import Data.List.NonEmpty as NonEmptyList import Data.Map as Map import Data.Ordering (invert) import Data.Profunctor as Profunctor import Data.Set as Set +import Data.Set.NonEmpty (NonEmptySet) +import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits +import Data.These (These(..)) +import Data.Tuple (uncurry) import Data.Variant as Variant import Effect.Class.Console as Console +import Node.FS.Aff as FS.Aff import Node.Path as Path import 
Node.Process as Process import Parsing (Parser) @@ -40,39 +51,49 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic +import Registry.App.API (COMPILER_CACHE) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry as Registry import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.LenientVersion (LenientVersion) import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestValidationError) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) +import Registry.App.Manifest.SpagoYaml as SpagoYaml import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec (packageMap, versionMap) +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.Location as Location import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex -import Registry.Operation (PublishData) import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver (CompilerIndex(..)) +import Registry.Solver as Solver import Registry.Version as Version -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Run.Except -import Spago.Generated.BuildInfo as BuildInfo import Type.Proxy (Proxy(..)) data ImportMode = DryRun | GenerateRegistry | UpdateRegistry @@ -127,7 +148,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) GenerateRegistry -> do @@ -138,7 +159,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash (Registry.CommitAs (Git.pacchettibottiCommitter token)))) >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) UpdateRegistry -> do @@ -149,7 +170,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.ForceClean (Registry.CommitAs 
(Git.pacchettibottiCommitter token)))) >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret (Pursuit.handleAff token) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Recent) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) -- Logging setup @@ -161,18 +182,19 @@ main = launchAff_ do logFile = "legacy-importer-" <> String.take 19 (Formatter.DateTime.format Internal.Format.iso8601DateTime now) <> ".log" logPath = Path.concat [ logDir, logFile ] - runLegacyImport mode logPath + runLegacyImport logPath # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' -runLegacyImport :: forall r. ImportMode -> FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit -runLegacyImport mode logs = do +runLegacyImport :: forall r. FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit +runLegacyImport logs = do Log.info "Starting legacy import!" Log.info $ "Logs available at " <> logs @@ -204,108 +226,302 @@ runLegacyImport mode logs = do pure $ fixupNames allPackages Log.info $ "Read " <> show (Set.size (Map.keys legacyRegistry)) <> " package names from the legacy registry." + + Log.info "Reading reserved 0.13 packages..." + reserved0_13 <- readPackagesMetadata >>= case _ of + Left err -> do + Log.warn $ "Could not read reserved packages: " <> err + Log.warn $ "Determining reserved packages..." + metadata <- getPackagesMetadata legacyRegistry + let cutoff = filterPackages_0_13 metadata + writePackagesMetadata cutoff + pure cutoff + Right cutoff -> pure cutoff + + Log.info $ "Reserving metadata files for 0.13 and purs/metadata packages" + forWithIndex_ reserved0_13 \package { address } -> Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for reserved 0.13 package " <> PackageName.print package + let location = GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> Log.debug $ PackageName.print package <> " already reserved." + + let metadataPackage = unsafeFromRight (PackageName.parse "metadata") + let pursPackage = unsafeFromRight (PackageName.parse "purs") + let purescriptPackage = unsafeFromRight (PackageName.parse "purescript") + for_ [ metadataPackage, pursPackage, purescriptPackage ] \package -> + Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for " <> PackageName.print package + let location = GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print package, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> pure unit + importedIndex <- importLegacyRegistry legacyRegistry Log.info "Writing package and version failures to disk..." 
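+  -- These failure reports are plain JSON in the scratch directory, so a run
+  -- can be audited after the fact (see writePackageFailures below).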
Run.liftAff $ writePackageFailures importedIndex.failedPackages Run.liftAff $ writeVersionFailures importedIndex.failedVersions - Log.info "Writing empty metadata files for legacy packages that can't be registered..." - void $ forWithIndex importedIndex.reservedPackages \package location -> do - Registry.readMetadata package >>= case _ of - Nothing -> do - let metadata = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } - Registry.writeMetadata package metadata - Just _ -> pure unit - Log.info "Ready for upload!" - - Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex + let importStats = calculateImportStats legacyRegistry importedIndex + let formattedStats = formatImportStats importStats + Log.info formattedStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "import-stats.txt" ]) formattedStats Log.info "Sorting packages for upload..." - let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges importedIndex.registryIndex - - Log.info "Removing packages that previously failed publish" - indexPackages <- allIndexPackages # Array.filterA \(Manifest { name, version }) -> - isNothing <$> Cache.get _importCache (PublishFailure name version) + let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "sorted-packages.txt" ]) $ String.joinWith "\n" $ map (\(Manifest { name, version }) -> PackageName.print name <> "@" <> Version.print version) allIndexPackages - allMetadata <- Registry.readAllMetadata - - -- This record comes from the build directory (.spago) and records information - -- from the most recent build. - let compiler = unsafeFromRight (Version.parse BuildInfo.pursVersion) - - -- Just a safety check to ensure the compiler used in the pipeline is not too - -- low. Should be bumped from time to time to the latest compiler. - let minCompiler = unsafeFromRight (Version.parse "0.15.7") - when (compiler < minCompiler) do - Run.Except.throw $ "Local compiler " <> Version.print compiler <> " is too low (min: " <> Version.print minCompiler <> ")." 
- - Log.info $ "Using compiler " <> Version.print compiler + Log.info "Removing packages that previously failed publish or have been published" + publishable <- do + allMetadata <- Registry.readAllMetadata + allIndexPackages # Array.filterA \(Manifest { name, version }) -> do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure $ not $ hasMetadata allMetadata name version + Just _ -> pure false + + allCompilers <- PursVersions.pursVersions + allCompilersRange <- case Range.mk (NonEmptyArray.head allCompilers) (Version.bumpPatch (NonEmptyArray.last allCompilers)) of + Nothing -> Run.Except.throw $ "Failed to construct a compiler range from " <> Version.print (NonEmptyArray.head allCompilers) <> " and " <> Version.print (NonEmptyArray.last allCompilers) + Just range -> do + Log.info $ "All available compilers range: " <> Range.print range + pure range let - isPublished { name, version } = hasMetadata allMetadata name version - notPublished = indexPackages # Array.filter \(Manifest manifest) -> not (isPublished manifest) - - mkOperation :: Manifest -> Run _ PublishData - mkOperation (Manifest manifest) = - case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of - Nothing -> do - let formatted = formatPackageVersion manifest.name manifest.version - Log.error $ "Unable to recover package ref for " <> formatted - Run.Except.throw $ "Failed to create publish operation for " <> formatted - Just ref -> - pure - { location: Just manifest.location - , name: manifest.name - , ref: un RawVersion ref - , compiler - , resolutions: Nothing - } - - case notPublished of + publishLegacyPackage :: Solver.TransitivizedRegistry -> Manifest -> Run _ Unit + publishLegacyPackage legacyIndex (Manifest manifest) = do + let formatted = formatPackageVersion manifest.name manifest.version + Log.info $ "\n----------\nPUBLISHING: " <> formatted <> "\n----------\n" + RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of + Nothing -> Run.Except.throw $ "Unable to recover package ref for " <> formatted + Just ref -> pure ref + + Log.debug "Building dependency index with compiler versions..." + compilerIndex <- API.readCompilerIndex + + Log.debug $ "Solving dependencies for " <> formatted + eitherResolutions <- do + let toErrors = map Solver.printSolverError <<< NonEmptyList.toUnfoldable + let isCompilerSolveError = String.contains (String.Pattern "Conflict in version ranges for purs:") + let partitionIsCompiler = partitionEithers <<< map (\error -> if isCompilerSolveError error then Right error else Left error) + + legacySolution <- case Solver.solveFull { registry: legacyIndex, required: Solver.initializeRequired manifest.dependencies } of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with legacy index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right resolutions -> do + Log.debug $ "Solved " <> formatted <> " with legacy index." + -- The solutions do us no good if the dependencies don't exist. Note + -- the compiler index is updated on every publish. 
+ let lookupInRegistry res = maybe (Left res) (\_ -> Right res) (Map.lookup (fst res) (un CompilerIndex compilerIndex) >>= Map.lookup (snd res)) + let { fail: notRegistered } = partitionEithers $ map lookupInRegistry $ Map.toUnfoldable resolutions + if (Array.null notRegistered) then + pure $ Right resolutions + else do + let missing = "Some resolutions from legacy index are not registered: " <> String.joinWith ", " (map (uncurry formatPackageVersion) notRegistered) + Log.warn missing + Log.warn "Not using legacy index resolutions for this package." + pure $ Left $ SolveFailedDependencies missing + + currentSolution <- case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with current index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right (Tuple _ resolutions) -> do + Log.debug $ "Solved " <> formatted <> " with contemporary index." + pure $ Right resolutions + + pure $ case legacySolution, currentSolution of + Left err, Left _ -> Left err + Right resolutions, Left _ -> Right $ This resolutions + Left _, Right resolutions -> Right $ That resolutions + Right legacyResolutions, Right currentResolutions -> Right $ Both legacyResolutions currentResolutions + + case eitherResolutions of + -- We skip if we couldn't solve (but we write the error to cache). + Left err -> + Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right resolutionOptions -> do + Log.info "Selecting usable compiler from resolutions..." + + let + findFirstFromResolutions :: Map PackageName Version -> Run _ (Either (Map Version CompilerFailure) Version) + findFirstFromResolutions resolutions = do + Log.debug $ "Finding compiler for " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + possibleCompilers <- + if Map.isEmpty manifest.dependencies then do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." + pure $ NonEmptySet.fromFoldable1 allCompilers + else do + Log.debug "No compiler version was produced by the solver, so all compilers are potentially compatible." + allMetadata <- Registry.readAllMetadata + case compatibleCompilers allMetadata resolutions of + Left [] -> do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." 
+ pure $ NonEmptySet.fromFoldable1 allCompilers + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Log.warn $ Array.fold + [ "Resolutions admit no overlapping compiler versions:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + pure $ NonEmptySet.fromFoldable1 allCompilers + Right compilers -> do + Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CJ.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + + cached <- do + cached <- for (NonEmptySet.toUnfoldable possibleCompilers) \compiler -> + Cache.get API._compilerCache (API.Compilation (Manifest manifest) resolutions compiler) >>= case _ of + Nothing -> pure Nothing + Just { result: Left _ } -> pure Nothing + Just { target, result: Right _ } -> pure $ Just target + pure $ NonEmptyArray.fromArray $ Array.catMaybes cached + + case cached of + Just prev -> do + let selected = NonEmptyArray.last prev + Log.debug $ "Found successful cached compilation for " <> formatted <> " and chose " <> Version.print selected + pure $ Right selected + Nothing -> do + Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find first working one." + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + { path } <- Source.fetch tmp manifest.location ref + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." + let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + API.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Trying compilers one-by-one..." + selected <- findFirstCompiler + { source: path + , installed: installDir + , compilers: NonEmptySet.toUnfoldable possibleCompilers + , resolutions + , manifest: Manifest manifest + } + FS.Extra.remove tmp + pure selected + + let + collectCompilerErrors :: Map Version CompilerFailure -> Map (NonEmptyArray Version) CompilerFailure + collectCompilerErrors failures = do + let + foldFn prev xs = do + let Tuple _ failure = NonEmptyArray.head xs + let key = map fst xs + Map.insert key failure prev + Array.foldl foldFn Map.empty $ Array.groupAllBy (compare `on` snd) (Map.toUnfoldable failures) + + reportFailures :: forall a. _ -> Run _ (Either PublishError a) + reportFailures failures = do + let collected = collectCompilerErrors failures + Log.error $ "Failed to find any valid compilers for publishing:\n" <> printJson compilerFailureMapCodec collected + pure $ Left $ NoCompilersFound collected + + -- Here, we finally attempt to find a suitable compiler. If we only + -- got one set of working resolutions that's what we use. If we got + -- solutions with both the legacy and adjusted-manifest indices, then + -- we try the adjusted index first since that's what is used in the + -- publish pipeline. 
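+          -- ('This' carries legacy-only resolutions, 'That' current-only, and
+          -- 'Both' means both indices solved; we prefer the current solution
+          -- and fall back to the legacy one.)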
+ eitherCompiler <- case resolutionOptions of + This legacyResolutions -> do + selected <- findFirstFromResolutions legacyResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + That currentResolutions -> do + selected <- findFirstFromResolutions currentResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Both legacyResolutions currentResolutions -> do + selectedCurrent <- findFirstFromResolutions currentResolutions + case selectedCurrent of + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Left currentFailures | legacyResolutions == currentResolutions -> reportFailures currentFailures + Left _ -> do + Log.info $ "Could not find suitable compiler from current index, trying legacy solution..." + selectedLegacy <- findFirstFromResolutions legacyResolutions + case selectedLegacy of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + + case eitherCompiler of + Left err -> Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right (Tuple compiler resolutions) -> do + Log.debug $ "Selected " <> Version.print compiler <> " for publishing." + let + payload = + { name: manifest.name + , location: Just manifest.location + , ref + , version: manifest.version + , compiler + , resolutions: Just resolutions + } + Run.Except.runExcept (API.publish (Just legacyIndex) payload) >>= case _ of + Left error -> do + Log.error $ "Failed to publish " <> formatted <> ": " <> error + Cache.put _importCache (PublishFailure manifest.name manifest.version) (PublishError error) + Right _ -> do + Log.info $ "Published " <> formatted + + case publishable of [] -> Log.info "No packages to publish." manifests -> do - let printPackage (Manifest { name, version }) = formatPackageVersion name version Log.info $ Array.foldMap (append "\n") [ "----------" , "AVAILABLE TO PUBLISH" - , "" - , " using purs " <> Version.print compiler - , "" + , Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) manifests , "----------" - , Array.foldMap (append "\n - " <<< printPackage) manifests ] - let - source = case mode of - DryRun -> LegacyPackage - GenerateRegistry -> LegacyPackage - UpdateRegistry -> CurrentPackage - - void $ for notPublished \(Manifest manifest) -> do - let formatted = formatPackageVersion manifest.name manifest.version - Log.info $ Array.foldMap (append "\n") - [ "----------" - , "PUBLISHING: " <> formatted - , stringifyJson Location.codec manifest.location - , "----------" - ] - operation <- mkOperation (Manifest manifest) - - result <- Run.Except.runExcept $ API.publish source operation - -- TODO: Some packages will fail because the legacy importer does not - -- perform all the same validation checks that the publishing flow does. - -- What should we do when a package has a valid manifest but fails for - -- other reasons? Should they be added to the package validation - -- failures and we defer writing the package failures until the import - -- has completed? - case result of - Left error -> do - Log.error $ "Failed to publish " <> formatted <> ": " <> error - Cache.put _importCache (PublishFailure manifest.name manifest.version) error - Right _ -> do - Log.info $ "Published " <> formatted + legacyIndex <- do + Log.info "Transitivizing legacy registry..." 
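+      -- Close each manifest's dependency ranges over its transitive
+      -- dependencies, since legacy manifests often relied on transitive deps
+      -- that the registry now requires to be listed directly.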
+ pure + $ Solver.exploreAllTransitiveDependencies + $ Solver.initializeRegistry + $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) + + void $ for manifests (publishLegacyPackage legacyIndex) + + Log.info "Finished publishing! Collecting all publish failures and writing to disk." + let + collectError prev (Manifest { name, version }) = do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure prev + Just error -> pure $ Map.insertWith Map.union name (Map.singleton version error) prev + failures <- Array.foldM collectError Map.empty allIndexPackages + Run.liftAff $ writePublishFailures failures + + let publishStats = collectPublishFailureStats importStats (map _.address reserved0_13) importedIndex.registryIndex failures + let publishStatsMessage = formatPublishFailureStats publishStats + Log.info publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "reserved-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable publishStats.packages.reserved))) + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "removed-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable (Set.difference publishStats.packages.failed publishStats.packages.reserved)))) + +-- | Record all package failures to the 'package-failures.json' file. +writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit +writePublishFailures = + writeJsonFile (packageMap (versionMap jsonValidationErrorCodec)) (Path.concat [ scratchDir, "publish-failures.json" ]) + <<< map (map formatPublishError) -- | Record all package failures to the 'package-failures.json' file. writePackageFailures :: Map RawPackageName PackageValidationError -> Aff Unit @@ -324,7 +540,7 @@ type LegacyRegistry = Map RawPackageName String type ImportedIndex = { failedPackages :: Map RawPackageName PackageValidationError , failedVersions :: Map RawPackageName (Map RawVersion VersionValidationError) - , reservedPackages :: Map PackageName Location + , removedPackages :: Map PackageName Location , registryIndex :: ManifestIndex , packageRefs :: Map PackageName (Map Version RawVersion) } @@ -362,16 +578,15 @@ importLegacyRegistry legacyRegistry = do -- A 'checked' index is one where we have verified that all dependencies -- are self-contained within the registry. - Tuple unsatisfied validIndex = ManifestIndex.maximalIndex validLegacyManifests + Tuple unsatisfied validIndex = ManifestIndex.maximalIndex ManifestIndex.ConsiderRanges validLegacyManifests -- The list of all packages that were present in the legacy registry files, - -- but which have no versions present in the fully-imported registry. These - -- packages still need to have empty metadata files written for them. - reservedPackages :: Map PackageName Location - reservedPackages = - Map.fromFoldable $ Array.mapMaybe reserved $ Map.toUnfoldable legacyRegistry + -- but which have no versions present in the fully-imported registry. 
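+  -- Unlike the reserved 0.13 / organization packages handled earlier, these
+  -- packages get no placeholder metadata and are dropped from the registry.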
+ removedPackages :: Map PackageName Location + removedPackages = + Map.fromFoldable $ Array.mapMaybe removed $ Map.toUnfoldable legacyRegistry where - reserved (Tuple (RawPackageName name) address) = do + removed (Tuple (RawPackageName name) address) = do packageName <- hush $ PackageName.parse name guard $ isNothing $ Map.lookup packageName $ ManifestIndex.toMap validIndex { owner, repo } <- hush $ Parsing.runParser address legacyRepoParser @@ -403,7 +618,7 @@ importLegacyRegistry legacyRegistry = do pure { failedPackages: packageFailures , failedVersions: versionFailures - , reservedPackages: reservedPackages + , removedPackages: removedPackages , registryIndex: validIndex , packageRefs } @@ -428,17 +643,19 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa buildManifestForVersion :: Tag -> Run _ (Either VersionValidationError Manifest) buildManifestForVersion tag = Run.Except.runExceptAt _exceptVersion do version <- exceptVersion $ validateVersion tag - - -- TODO: This will use the manifest for the package version from the - -- registry, without trying to produce a legacy manifest. However, we may - -- want to always attempt to produce a legacy manifest. If we can produce - -- one we compare it to the existing entry, failing if there is a - -- difference; if we can't, we warn and fall back to the existing entry. - Registry.readManifest package.name (LenientVersion.version version) >>= case _ of + Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + Just cached -> exceptVersion cached Nothing -> do - Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + -- While technically not 'legacy', we do need to handle packages with + -- spago.yaml files because they've begun to pop up since the registry + -- alpha began and we don't want to drop them when doing a re-import. + fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of + Just manifest -> do + Log.debug $ "Built manifest from discovered spago.yaml file." + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) + pure manifest Nothing -> do - Log.debug $ "Building manifest in legacy import because it was not found in cache: " <> formatPackageVersion package.name (LenientVersion.version version) + Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) manifest <- Run.Except.runExceptAt _exceptVersion do exceptVersion $ validateVersionDisabled package.name version legacyManifest <- do @@ -446,13 +663,11 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." 
} Right result -> pure result pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest exceptVersion manifest - Just cached -> - exceptVersion cached - - Just manifest -> - exceptVersion $ Right manifest manifests <- for package.tags \tag -> do manifest <- buildManifestForVersion tag @@ -460,6 +675,145 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa pure $ Map.fromFoldable manifests +data PublishError + = SolveFailedDependencies String + | SolveFailedCompiler String + | NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure) + | UnsolvableDependencyCompilers (Array GroupedByCompilers) + | PublishError String + +derive instance Eq PublishError + +publishErrorCodec :: CJ.Codec PublishError +publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CJ.Variant.variantMatch + { solveFailedCompiler: Right CJ.string + , solveFailedDependencies: Right CJ.string + , noCompilersFound: Right compilerFailureMapCodec + , unsolvableDependencyCompilers: Right (CJ.array groupedByCompilersCodec) + , publishError: Right CJ.string + } + where + toVariant = case _ of + SolveFailedDependencies error -> Variant.inj (Proxy :: _ "solveFailedDependencies") error + SolveFailedCompiler error -> Variant.inj (Proxy :: _ "solveFailedCompiler") error + NoCompilersFound failed -> Variant.inj (Proxy :: _ "noCompilersFound") failed + UnsolvableDependencyCompilers group -> Variant.inj (Proxy :: _ "unsolvableDependencyCompilers") group + PublishError error -> Variant.inj (Proxy :: _ "publishError") error + + fromVariant = Variant.match + { solveFailedDependencies: SolveFailedDependencies + , solveFailedCompiler: SolveFailedCompiler + , noCompilersFound: NoCompilersFound + , unsolvableDependencyCompilers: UnsolvableDependencyCompilers + , publishError: PublishError + } + +type PublishFailureStats = + { packages :: { total :: Int, considered :: Int, partial :: Int, failed :: Set PackageName, reserved :: Set PackageName } + , versions :: { total :: Int, considered :: Int, failed :: Int, reason :: Map String Int } + } + +collectPublishFailureStats :: ImportStats -> Map PackageName Address -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats +collectPublishFailureStats importStats reserved0_13 importedIndex failures = do + let + index :: Map PackageName (Map Version Manifest) + index = ManifestIndex.toMap importedIndex + + countVersions :: forall a. 
Map PackageName (Map Version a) -> Int
+ countVersions = Array.foldl (\prev (Tuple _ versions) -> prev + Map.size versions) 0 <<< Map.toUnfoldable
+
+ startPackages :: Int
+ startPackages = importStats.packagesProcessed
+
+ consideredPackages :: Int
+ consideredPackages = Map.size index
+
+ startVersions :: Int
+ startVersions = importStats.versionsProcessed
+
+ consideredVersions :: Int
+ consideredVersions = countVersions index
+
+ failedPackages :: Int
+ failedPackages = Map.size failures
+
+ failedVersions :: Int
+ failedVersions = countVersions failures
+
+ removedPackages :: Set PackageName
+ removedPackages = do
+ let
+ foldFn package prev versions = fromMaybe prev do
+ allVersions <- Map.lookup package index
+ guard (Map.keys allVersions == Map.keys versions)
+ pure $ Set.insert package prev
+
+ foldlWithIndex foldFn Set.empty failures
+
+ -- Packages that are eligible for removal, but are reserved due to 0.13 or
+ -- organization status, are the 'reserved packages'.
+ reservedPackages :: Set PackageName
+ reservedPackages = Set.intersection removedPackages (Map.keys reserved0_13)
+
+ countByFailure :: Map String Int
+ countByFailure = do
+ let
+ toKey = case _ of
+ SolveFailedDependencies _ -> "Solving failed (dependencies)"
+ SolveFailedCompiler _ -> "Solving failed (compiler)"
+ NoCompilersFound _ -> "No compilers usable for publishing"
+ UnsolvableDependencyCompilers _ -> "Dependency compiler conflict"
+ PublishError _ -> "Publishing failed"
+
+ foldFn prev (Tuple _ versions) =
+ Array.foldl (\prevCounts (Tuple _ error) -> Map.insertWith (+) (toKey error) 1 prevCounts) prev (Map.toUnfoldable versions)
+
+ Array.foldl foldFn Map.empty (Map.toUnfoldable failures)
+
+ { packages:
+ { total: startPackages
+ , considered: consideredPackages
+ , partial: failedPackages
+ , reserved: reservedPackages
+ , failed: removedPackages
+ }
+ , versions:
+ { total: startVersions
+ , considered: consideredVersions
+ , failed: failedVersions
+ , reason: countByFailure
+ }
+ }
+
+formatPublishFailureStats :: PublishFailureStats -> String
+formatPublishFailureStats { packages, versions } = String.joinWith "\n"
+ [ "--------------------"
+ , "PUBLISH FAILURES"
+ , "--------------------"
+ , ""
+ , show packages.considered <> " of " <> show packages.total <> " total packages were considered for publishing (others had no manifests imported)."
+ -- `partial` counts every package with at least one failed version, and the
+ -- fully-failed packages are a subset of those, so the categories below
+ -- partition the considered packages.
+ , " - " <> show (packages.considered - packages.partial) <> " out of " <> show packages.considered <> " packages fully succeeded."
+ , " - " <> show (packages.partial - Set.size packages.failed) <> " packages partially succeeded."
+ , " - " <> show (Set.size packages.reserved) <> " packages fully failed, but are reserved due to 0.13 or organization status."
+ , " - " <> show (Set.size packages.failed - Set.size packages.reserved) <> " packages had all versions fail and will be removed."
+ , ""
+ , show versions.considered <> " of " <> show versions.total <> " total versions were considered for publishing.\n - " <> show versions.failed <> " out of " <> show versions.total <> " versions failed."
+ , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) + ] + +compilerFailureMapCodec :: CJ.Codec (Map (NonEmptyArray Version) CompilerFailure) +compilerFailureMapCodec = do + let + print = NonEmptyArray.intercalate "," <<< map Version.print + parse input = do + let versions = String.split (String.Pattern ",") input + let { fail, success } = partitionEithers $ map Version.parse versions + case NonEmptyArray.fromArray success of + Nothing | Array.null fail -> Left "No versions" + Nothing -> Left $ "No versions parsed, some failed: " <> String.joinWith ", " fail + Just result -> pure result + Internal.Codec.strMap "CompilerFailureMap" parse print compilerFailureCodec + type EXCEPT_VERSION :: Row (Type -> Type) -> Row (Type -> Type) type EXCEPT_VERSION r = (exceptVersion :: Run.Except.Except VersionValidationError | r) @@ -569,6 +923,56 @@ type PackageResult = , tags :: Array Tag } +type PackagesMetadata = { address :: Address, lastPublished :: Date } + +packagesMetadataCodec :: CJ.Codec PackagesMetadata +packagesMetadataCodec = CJ.named "PackagesMetadata" $ CJ.Record.object + { address: CJ.named "Address" $ CJ.Record.object { owner: CJ.string, repo: CJ.string } + , lastPublished: Internal.Codec.iso8601Date + } + +getPackagesMetadata :: forall r. Map RawPackageName String -> Run (EXCEPT String + GITHUB + r) (Map PackageName PackagesMetadata) +getPackagesMetadata legacyRegistry = do + associated <- for (Map.toUnfoldableUnordered legacyRegistry) \(Tuple rawName rawUrl) -> do + Run.Except.runExceptAt (Proxy :: _ "exceptPackage") (validatePackage rawName rawUrl) >>= case _ of + Left _ -> pure Nothing + Right { name, address, tags } -> case Array.head tags of + Nothing -> pure Nothing + Just tag -> do + result <- GitHub.getCommitDate address tag.sha + case result of + Left error -> unsafeCrashWith ("Failed to get commit date for " <> PackageName.print name <> "@" <> tag.name <> ": " <> Octokit.printGitHubError error) + Right date -> pure $ Just $ Tuple name { address, lastPublished: DateTime.date date } + pure $ Map.fromFoldable $ Array.catMaybes associated + +filterPackages_0_13 :: Map PackageName PackagesMetadata -> Map PackageName PackagesMetadata +filterPackages_0_13 = do + let + -- 0.13 release date + cutoff = DateTime.canonicalDate (unsafeFromJust (toEnum 2019)) May (unsafeFromJust (toEnum 29)) + organizations = + [ "purescript" + , "purescript-contrib" + , "purescript-node" + , "purescript-web" + , "rowtype-yoga" + , "purescript-halogen" + , "purescript-deprecated" + ] + + Map.filterWithKey \_ metadata -> do + let { owner } = metadata.address + owner `Array.elem` organizations || metadata.lastPublished >= cutoff + +writePackagesMetadata :: forall r. Map PackageName PackagesMetadata -> Run (LOG + AFF + r) Unit +writePackagesMetadata pkgs = do + let path = Path.concat [ scratchDir, "packages-metadata.json" ] + Log.info $ "Writing packages metadata to " <> path + Run.liftAff $ writeJsonFile (packageMap packagesMetadataCodec) path pkgs + +readPackagesMetadata :: forall r. Run (AFF + r) (Either String (Map PackageName PackagesMetadata)) +readPackagesMetadata = Run.liftAff $ readJsonFile (packageMap packagesMetadataCodec) (Path.concat [ scratchDir, "packages-metadata.json" ]) + validatePackage :: forall r. 
RawPackageName -> String -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult validatePackage rawPackage rawUrl = do name <- exceptPackage $ validatePackageName rawPackage @@ -641,6 +1045,7 @@ validatePackageDisabled package = disabledPackages :: Map String String disabledPackages = Map.fromFoldable [ Tuple "metadata" reservedPackage + , Tuple "purs" reservedPackage , Tuple "bitstrings" noSrcDirectory , Tuple "purveyor" noSrcDirectory , Tuple "styled-components" noSrcDirectory @@ -661,14 +1066,14 @@ validatePackageName (RawPackageName name) = type JsonValidationError = { tag :: String - , value :: Maybe String + , value :: Maybe JSON , reason :: String } jsonValidationErrorCodec :: CJ.Codec JsonValidationError jsonValidationErrorCodec = CJ.named "JsonValidationError" $ CJ.Record.object { tag: CJ.string - , value: CJ.Record.optional CJ.string + , value: CJ.Record.optional CJ.json , reason: CJ.string } @@ -677,31 +1082,43 @@ formatPackageValidationError { error, reason } = case error of InvalidPackageName -> { tag: "InvalidPackageName", value: Nothing, reason } InvalidPackageURL url -> - { tag: "InvalidPackageURL", value: Just url, reason } + { tag: "InvalidPackageURL", value: Just (CJ.encode CJ.string url), reason } PackageURLRedirects { registered } -> - { tag: "PackageURLRedirects", value: Just (registered.owner <> "/" <> registered.repo), reason } + { tag: "PackageURLRedirects", value: Just (CJ.encode CJ.string (registered.owner <> "/" <> registered.repo)), reason } CannotAccessRepo address -> - { tag: "CannotAccessRepo", value: Just (address.owner <> "/" <> address.repo), reason } + { tag: "CannotAccessRepo", value: Just (CJ.encode CJ.string (address.owner <> "/" <> address.repo)), reason } DisabledPackage -> { tag: "DisabledPackage", value: Nothing, reason } formatVersionValidationError :: VersionValidationError -> JsonValidationError formatVersionValidationError { error, reason } = case error of InvalidTag tag -> - { tag: "InvalidTag", value: Just tag.name, reason } + { tag: "InvalidTag", value: Just (CJ.encode CJ.string tag.name), reason } DisabledVersion -> { tag: "DisabledVersion", value: Nothing, reason } InvalidManifest err -> do let errorValue = Legacy.Manifest.printLegacyManifestError err.error - { tag: "InvalidManifest", value: Just errorValue, reason } - UnregisteredDependencies names -> do - let errorValue = String.joinWith ", " $ map PackageName.print names - { tag: "UnregisteredDependencies", value: Just errorValue, reason } + { tag: "InvalidManifest", value: Just (CJ.encode CJ.string errorValue), reason } + UnregisteredDependencies names -> + { tag: "UnregisteredDependencies", value: Just (CJ.encode (CJ.array PackageName.codec) names), reason } + +formatPublishError :: PublishError -> JsonValidationError +formatPublishError = case _ of + SolveFailedCompiler error -> + { tag: "SolveFailedCompiler", value: Nothing, reason: error } + SolveFailedDependencies error -> + { tag: "SolveFailedDependencies", value: Nothing, reason: error } + NoCompilersFound versions -> + { tag: "NoCompilersFound", value: Just (CJ.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." 
} + UnsolvableDependencyCompilers failed -> + { tag: "UnsolvableDependencyCompilers", value: Just (CJ.encode (CJ.array groupedByCompilersCodec) failed), reason: "Resolved dependencies cannot compile together" } + PublishError error -> + { tag: "PublishError", value: Nothing, reason: error } type ImportStats = { packagesProcessed :: Int , versionsProcessed :: Int - , packageNamesReserved :: Int + , packageNamesRemoved :: Int , packageResults :: { success :: Int, partial :: Int, fail :: Int } , versionResults :: { success :: Int, fail :: Int } , packageErrors :: Map String Int @@ -714,7 +1131,7 @@ formatImportStats stats = String.joinWith "\n" , show stats.packagesProcessed <> " packages processed:" , indent $ show stats.packageResults.success <> " fully successful" , indent $ show stats.packageResults.partial <> " partially successful" - , indent $ show (stats.packageNamesReserved - stats.packageResults.fail) <> " reserved (no usable versions)" + , indent $ show (stats.packageNamesRemoved - stats.packageResults.fail) <> " omitted (no usable versions)" , indent $ show stats.packageResults.fail <> " fully failed" , indent "---" , formatErrors stats.packageErrors @@ -747,8 +1164,8 @@ calculateImportStats legacyRegistry imported = do packagesProcessed = Map.size legacyRegistry - packageNamesReserved = - Map.size imported.reservedPackages + packageNamesRemoved = + Map.size imported.removedPackages packageResults = do let succeeded = Map.keys registryIndex @@ -801,7 +1218,7 @@ calculateImportStats legacyRegistry imported = do { packagesProcessed , versionsProcessed - , packageNamesReserved + , packageNamesRemoved , packageResults , versionResults , packageErrors @@ -830,12 +1247,129 @@ legacyRepoParser = do pure { owner, repo } +fetchSpagoYaml :: forall r. Address -> RawVersion -> Run (GITHUB + LOG + EXCEPT String + r) (Maybe Manifest) +fetchSpagoYaml address ref = do + eitherSpagoYaml <- GitHub.getContent address ref "spago.yaml" + case eitherSpagoYaml of + Left err -> do + Log.debug $ "No spago.yaml found: " <> Octokit.printGitHubError err + pure Nothing + Right contents -> do + Log.debug $ "Found spago.yaml file\n" <> contents + case parseYaml SpagoYaml.spagoYamlCodec contents of + Left error -> do + Log.warn $ "Failed to parse spago.yaml file:\n" <> contents <> "\nwith errors:\n" <> error + pure Nothing + Right { package: Just { publish: Just { location: Just location } } } + | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do + Log.warn "spago.yaml file does not use the same location it was fetched from, this is disallowed..." + pure Nothing + Right config -> case SpagoYaml.spagoYamlToManifest config of + Left err -> do + Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> contents <> "\nwith errors:\n" <> err + pure Nothing + Right manifest -> do + Log.debug "Successfully converted a spago.yaml into a purs.json manifest" + pure $ Just manifest + +-- | Find the first compiler that can compile the package source code and +-- | installed resolutions from the given array of compilers. Begins with the +-- | latest compiler and works backwards to older compilers. +findFirstCompiler + :: forall r + . 
{ compilers :: Array Version + , manifest :: Manifest + , resolutions :: Map PackageName Version + , source :: FilePath + , installed :: FilePath + } + -> Run (COMPILER_CACHE + STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) +findFirstCompiler { source, manifest, resolutions, compilers, installed } = do + search <- Run.Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do + result <- Cache.get API._compilerCache (API.Compilation manifest resolutions target) >>= case _ of + Nothing -> do + Log.info $ "Not cached, trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + let cache = { result: map (const unit) result, target } + Cache.put API._compilerCache (API.Compilation manifest resolutions target) cache + pure cache.result + Just cached -> + pure cached.result + + case result of + Left error -> pure $ Tuple target error + Right _ -> Run.Except.throw target + + case search of + Left worked -> pure $ Right worked + Right others -> pure $ Left $ Map.fromFoldable others + +type GroupedByCompilers = + { packages :: Map PackageName Version + , compilers :: NonEmptySet Version + } + +groupedByCompilersCodec :: CJ.Codec GroupedByCompilers +groupedByCompilersCodec = CJ.named "GroupedByCompilers" $ CJ.Record.object + { compilers: CJ.Common.nonEmptySet Version.codec + , packages: Internal.Codec.packageMap Version.codec + } + +-- | Given a set of package versions, determine the set of compilers that can be +-- | used for all packages. +compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Either (Array GroupedByCompilers) (NonEmptySet Version) +compatibleCompilers allMetadata resolutions = do + let + associated :: Array { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } + associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do + Metadata metadata <- Map.lookup name allMetadata + published <- Map.lookup version metadata.published + Just { name, version, compilers: published.compilers } + + case Array.uncons associated of + Nothing -> + Left [] + Just { head, tail: [] } -> + Right $ NonEmptySet.fromFoldable1 head.compilers + Just { head, tail } -> do + let foldFn prev = Set.intersection prev <<< Set.fromFoldable <<< _.compilers + case NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head.compilers) tail of + -- An empty intersection means there are no shared compilers among the + -- resolved dependencies. + Nothing -> do + let + grouped :: Array (NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version }) + grouped = Array.groupAllBy (compare `on` _.compilers) (Array.cons head tail) + + collect :: NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } -> GroupedByCompilers + collect vals = + { packages: Map.fromFoldable (map (\{ name, version } -> Tuple name version) vals) + -- We've already grouped by compilers, so those must all be equal + -- and we can take just the first value. 
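+ -- For example (values invented for illustration): if foo@1.0.0 and
+ -- bar@2.0.0 both record compilers ["0.15.4", "0.15.6"], `groupAllBy`
+ -- places them in a single group, and that shared set is reported once.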
+ , compilers: NonEmptySet.fromFoldable1 (NonEmptyArray.head vals).compilers + } + Left $ Array.foldl (\prev -> Array.snoc prev <<< collect) [] grouped + + Just set -> + Right set + +type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) + +_importCache :: Proxy "importCache" +_importCache = Proxy + -- | A key type for the storage cache. Only supports packages identified by -- | their name and version. data ImportCache :: (Type -> Type -> Type) -> Type -> Type data ImportCache c a = ImportManifest PackageName RawVersion (c (Either VersionValidationError Manifest) a) - | PublishFailure PackageName Version (c String a) + | PublishFailure PackageName Version (c PublishError a) instance Functor2 c => Functor (ImportCache c) where map k (ImportManifest name version a) = ImportManifest name version (map2 k a) @@ -846,7 +1380,7 @@ instance MemoryEncodable ImportCache where ImportManifest name (RawVersion version) next -> Exists.mkExists $ Key ("ImportManifest__" <> PackageName.print name <> "__" <> version) next PublishFailure name version next -> do - Exists.mkExists $ Key ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) next + Exists.mkExists $ Key ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) next instance FsEncodable ImportCache where encodeFs = case _ of @@ -854,10 +1388,5 @@ instance FsEncodable ImportCache where let codec = CJ.Common.either versionValidationErrorCodec Manifest.codec Exists.mkExists $ AsJson ("ImportManifest__" <> PackageName.print name <> "__" <> version) codec next PublishFailure name version next -> do - let codec = CJ.string - Exists.mkExists $ AsJson ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) codec next - -type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) - -_importCache :: Proxy "importCache" -_importCache = Proxy + let codec = publishErrorCodec + Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 0bcacc643..db9b54d23 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -16,6 +16,7 @@ import Effect.Class.Console (log) import Effect.Class.Console as Console import Node.Path as Path import Node.Process as Process +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache @@ -152,10 +153,11 @@ main = launchAff_ do interpret = Registry.interpret (Registry.handle registryEnv) >>> Storage.interpret (if arguments.upload then Storage.handleS3 { s3, cache } else Storage.handleReadOnly cache) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) + >>> Cache.interpret _compilerCache (Cache.handleFs cache) >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv @@ -237,10 +239,11 @@ deleteVersion arguments name version = do Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" Just (Right specificPackageMetadata) -> do -- Obtains `newMetadata` via cache - API.publish LegacyPackage + API.publish Nothing { 
location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref + , version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index ffd66dbd2..8fa9a7070 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -28,6 +28,7 @@ import Node.Path as Path import Node.Process as Node.Process import Node.Process as Process import Parsing as Parsing +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache @@ -127,7 +128,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) let @@ -148,6 +149,7 @@ main = launchAff_ do # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv diff --git a/shell.nix b/shell.nix deleted file mode 100644 index ceca7e4e4..000000000 --- a/shell.nix +++ /dev/null @@ -1,11 +0,0 @@ -# A compatibility file that allows non-flakes users to still get a development -# shell with `nix-shell`. -(import ( - let - lock = builtins.fromJSON (builtins.readFile ./flake.lock); - in - fetchTarball { - url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; - sha256 = lock.nodes.flake-compat.locked.narHash; - } -) { src = ./.; }).shellNix diff --git a/types/v1/Metadata.dhall b/types/v1/Metadata.dhall index 84685290c..083960152 100644 --- a/types/v1/Metadata.dhall +++ b/types/v1/Metadata.dhall @@ -1,4 +1,5 @@ let Map = (./Prelude.dhall).Map.Type +let NonEmpty = (./Prelude.dhall).NonEmpty.Type let Owner = ./Owner.dhall @@ -14,6 +15,7 @@ let PublishedMetadata = { hash : Sha256 , bytes : Natural , publishedTime : ISO8601String + , compilers : NonEmpty Version } let UnpublishedMetadata = diff --git a/types/v1/Prelude.dhall b/types/v1/Prelude.dhall index 8b05657c4..d86e105e1 100644 --- a/types/v1/Prelude.dhall +++ b/types/v1/Prelude.dhall @@ -2,4 +2,4 @@ -- remote hosts in an offline environment (such as Nix in CI). DHALL_PRELUDE is -- automatically set in your Nix shell, but if you are not using a Nix shell and -- want to run this locally then the URL will be used instead. -env:DHALL_PRELUDE ? https://prelude.dhall-lang.org/v19.0.0/package.dhall sha256:eb693342eb769f782174157eba9b5924cf8ac6793897fc36a31ccbd6f56dafe2 +env:DHALL_PRELUDE
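
-- Note on `compatibleCompilers` (added above): the compilers usable for a
-- whole dependency solution are the intersection of the per-package compiler
-- sets recorded in metadata. A minimal sketch of that step, with versions
-- simplified to Strings and the grouping of conflicting packages elided:
module Sketch.SharedCompilers where

import Data.Array as Array
import Data.Foldable (foldl)
import Data.Maybe (Maybe(..))
import Data.Set (Set)
import Data.Set as Set

-- `Nothing` stands in for the "no resolutions" case; an empty intersection
-- corresponds to the `UnsolvableDependencyCompilers` publish error above.
sharedCompilers :: Array (Set String) -> Maybe (Set String)
sharedCompilers sets = case Array.uncons sets of
  Nothing -> Nothing
  Just { head, tail } -> Just (foldl Set.intersection head tail)

-- sharedCompilers
--   [ Set.fromFoldable [ "0.15.4", "0.15.6" ]
--   , Set.fromFoldable [ "0.15.6", "0.15.8" ]
--   ]
-- == Just (Set.fromFoldable [ "0.15.6" ])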
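
-- Note on `findFirstCompiler` (added above): it sorts the candidate
-- compilers, reverses so the newest is tried first, and stops at the first
-- success, accumulating per-version failures otherwise. A sketch of just the
-- search order, with compilation replaced by a pure predicate (an assumption
-- for illustration; the real code calls `Purs.callCompiler` and caches each
-- result in `_compilerCache`):
module Sketch.FirstUsable where

import Prelude

import Data.Array as Array
import Data.Foldable (find)
import Data.Maybe (Maybe)

firstUsable :: forall a. Ord a => (a -> Boolean) -> Array a -> Maybe a
firstUsable compiles = find compiles <<< Array.reverse <<< Array.sort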
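
-- Note on the reworked `buildManifestForVersion` (above): a manifest is now
-- sourced in a fixed order: the import cache first, then a spago.yaml
-- discovered in the repository, and only then a converted legacy manifest.
-- A sketch of that fallback chain with all effects elided (names invented
-- for illustration):
module Sketch.ManifestFallback where

import Control.Alt ((<|>))
import Data.Maybe (Maybe)

pickManifest
  :: forall manifest
   . Maybe manifest -- cache hit
  -> Maybe manifest -- parsed spago.yaml
  -> Maybe manifest -- converted legacy manifest
  -> Maybe manifest
pickManifest cached spagoYaml legacy = cached <|> spagoYaml <|> legacy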
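
-- Note on `compilerFailureMapCodec` (added above): the map is encoded as a
-- JSON object whose keys are the failing versions joined with commas, e.g.
-- "0.13.8,0.14.0". A sketch of that key round-trip on plain Strings (the
-- real codec goes through `Version.print`/`Version.parse` and rejects keys
-- with unparseable segments):
module Sketch.VersionKeys where

import Data.String (Pattern(..))
import Data.String as String

printKey :: Array String -> String
printKey = String.joinWith ","

parseKey :: String -> Array String
parseKey = String.split (Pattern ",")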