From abde4c27bc88e11d73ef8dcfd56a41d1d18d74b6 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 14 Sep 2024 14:07:21 -0400 Subject: [PATCH 1/6] Update database schemas and add job executor loop --- app/src/App/Effect/Db.purs | 130 +++-- app/src/App/Effect/Log.purs | 2 +- app/src/App/SQLite.js | 173 +++++-- app/src/App/SQLite.purs | 458 +++++++++++++----- app/src/App/Server.purs | 208 +++++--- .../20240914170550_delete_jobs_logs_table.sql | 22 + ...20240914171030_create_job_queue_tables.sql | 56 +++ db/schema.sql | 52 +- lib/src/API/V1.purs | 2 - lib/src/Operation.purs | 21 + 10 files changed, 875 insertions(+), 249 deletions(-) create mode 100644 db/migrations/20240914170550_delete_jobs_logs_table.sql create mode 100644 db/migrations/20240914171030_create_job_queue_tables.sql diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..142149bc0 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,10 +8,12 @@ import Data.String as String import Registry.API.V1 (JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +23,20 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. + data Db a - = InsertLog LogLine a + = InsertPackageJob InsertPackageJob a + | InsertMatrixJob InsertMatrixJob a + | InsertPackageSetJob InsertPackageSetJob a + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) + | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + | DeleteIncompleteJobs a derive instance Functor Db @@ -39,28 +48,51 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. +-- | Select all logs for a given job, filtered by loglevel. selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. 
JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) +selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow + +-- | Insert a new package job into the database. +insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) Unit +insertPackageJob job = Run.lift _db (InsertPackageJob job unit) + +-- | Insert a new matrix job into the database. +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) Unit +insertMatrixJob job = Run.lift _db (InsertMatrixJob job unit) + +-- | Insert a new package set job into the database. +insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) Unit +insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job unit) + +-- | Start a job in the database. +startJob :: forall r. StartJob -> Run (DB + r) Unit +startJob job = Run.lift _db (StartJob job unit) --- | Select a job by package name from the database, failing if there is no --- | current job available for that package name. -runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job) -runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity) +-- | Select the next package job from the database. +selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails) +selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow + +-- | Select the next matrix job from the database. +selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) +selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow + +-- | Select the next package set job from the database. +selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +deleteIncompleteJobs :: forall r. Run (DB + r) Unit +deleteIncompleteJobs = Run.lift _db (DeleteIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +102,52 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. 
SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log + InsertPackageJob job next -> do + Run.liftEffect $ SQLite.insertPackageJob env.db job pure next - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertMatrixJob job next -> do + Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure next - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + InsertPackageSetJob job next -> do + Run.liftEffect $ SQLite.insertPackageSetJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId - pure $ reply job + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job + pure next + + SelectJobInfo jobId reply -> do + result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId + pure $ reply result + + SelectNextPackageJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + DeleteIncompleteJobs next -> do + Run.liftEffect $ SQLite.deleteIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..a1cb72c0a 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -134,5 +134,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 8158695fc..fa9a8b539 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -1,5 +1,11 @@ import Database from "better-sqlite3"; +const JOB_INFO_TABLE = 'job_info' +const LOGS_TABLE = 'logs' +const PACKAGE_JOBS_TABLE = 'package_jobs'; +const MATRIX_JOBS_TABLE = 'matrix_jobs'; +const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; + export const connectImpl = (path, logger) => { logger("Connecting to database at " + path); let db = new Database(path, { @@ -11,49 +17,152 @@ export const connectImpl = (path, logger) => { return db; }; -export const insertLogImpl = (db, logLine) => { - db.prepare( - "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)" - ).run(logLine); +export const selectJobInfoImpl = (db, jobId) => { + const stmt = 
db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+}
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include a 'jobId' and 'createdAt'
+// field, though other fields will be required depending on the job.
+const _insertJob = (db, table, columns, job) => {
+  const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns]));
+  const missingFields = requiredFields.filter(field => !(field in job));
+  const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field));
+
+  if (missingFields.length > 0) {
+    throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`);
+  }
+
+  if (extraFields.length > 0) {
+    throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`);
+  }
+
+  const insertInfo = db.prepare(`
+    INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success)
+    VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success)
+  `);
+
+  const insertJob = db.prepare(`
+    INSERT INTO ${table} (${columns.join(', ')})
+    VALUES (${columns.map(col => `@${col}`).join(', ')})
+  `);
+
+  const insert = db.transaction((job) => {
+    insertInfo.run({
+      jobId: job.jobId,
+      createdAt: job.createdAt,
+      startedAt: null,
+      finishedAt: null,
+      success: 0
+    });
+    insertJob.run(job);
+  });
+
+  return insert(job);
+};
+
+export const insertPackageJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'jobType', 'packageName', 'packageVersion', 'payload' ]
+  return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job);
 };
 
-export const selectLogsByJobImpl = (db, jobId, logLevel) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM logs WHERE jobId = ? AND level >= ? ORDER BY timestamp ASC"
-    )
-    .all(jobId, logLevel);
-  return row;
+export const insertMatrixJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload' ]
+  return _insertJob(db, MATRIX_JOBS_TABLE, columns, job);
 };
 
-export const createJobImpl = (db, job) => {
-  db.prepare(
-    "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)"
-  ).run(job);
+export const insertPackageSetJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'payload' ]
+  return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job);
 };
 
-export const finishJobImpl = (db, result) => {
-  db.prepare(
-    "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId"
-  ).run(result);
+export const selectNextPackageJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt DESC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const selectJobImpl = (db, jobId) => {
-  const row = db
-    .prepare("SELECT * FROM jobs WHERE jobId = ? 
LIMIT 1") - .get(jobId); - return row; +export const selectNextMatrixJobImpl = (db) => { + const stmt = db.prepare(` + SELECT job.*, info.createdAt, info.startedAt + FROM ${MATRIX_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.finishedAt IS NULL + ORDER BY info.createdAt DESC + LIMIT 1 + `); + return stmt.get(); }; -export const runningJobForPackageImpl = (db, packageName) => { - const row = db - .prepare( - "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectNextPackageSetJobImpl = (db) => { + const stmt = db.prepare(` + SELECT job.*, info.createdAt, info.startedAt + FROM ${PACKAGE_SET_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.finishedAt IS NULL + ORDER BY info.createdAt DESC + LIMIT 1 + `); + return stmt.get(); }; +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); + const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`); + return stmt.run(); +}; + +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); +}; + +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? + `; + + const params = [jobId, logLevel]; + + if (since !== null) { + query += ' AND timestamp >= ?'; + params.push(since); + } + + query += ' ORDER BY timestamp ASC'; + + const stmt = db.prepare(query); + return stmt.all(...params); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..8c117fda7 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,426 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + +-- TOMORROW: +-- +-- * Add the job executor to server startup +-- * Move the various job details to the API.V1 module since it'll be returned by the UI +-- * Update the router to just create a job when received, and on lookup to return relevant details from the db +-- * Update the router to have an endpoint for creating a package set job and compiler matrix job using the +-- same authentication requirements as for GitHub today. +-- * Move the compiler matrix out of publish into its own functionality so it can be called. We want to +-- be able to spawn a matrix job at any time for a compiler/package version pair, but need a helper to +-- do the whole toposort thing. 
+-- * Update job execution to actually call the relevant publish/unpublish/transfer/package set API fn +-- +-- LATER +-- * Update tests that refer to the DB effect +-- * Adjust the integration test(s) to verify we're getting enforced concurrency control +-- * Update the GitHub issue module so it only submits a request to the registry and returns +-- a job id, rather than actually running the fns directly. Poll for a result still and +-- comment when the job completes. +-- +-- FOLLOWUP +-- * Punt on the squash commit until later. module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob - , SQLite + ( SQLite + , ConnectOptions , connect - , createJob - , deleteIncompleteJobs + , JobInfo + , selectJobInfo + , InsertPackageJob + , insertPackageJob + , InsertMatrixJob + , insertMatrixJob + , InsertPackageSetJob + , insertPackageSetJob + , FinishJob , finishJob - , insertLog - , runningJobForPackage - , selectJob + , StartJob + , startJob + , deleteIncompleteJobs + , insertLogLine , selectLogsByJob + , PackageJobDetails + , selectNextPackageJob + , MatrixJobDetails + , selectNextMatrixJob + , PackageSetJobDetails + , selectNextPackageSetJob ) where import Registry.App.Prelude -import Data.Array as Array +import Codec.JSON.DecodeError as JSON.DecodeError import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Nullable as Nullable +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) import Registry.API.V1 as API.V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit - -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. 
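+-- | Timestamps are stored in SQLite as ISO8601 strings and decoded by jobInfoFromJSRep below.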
+type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + isSuccess <- case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: isSuccess + } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +selectJobInfo db (JobId jobId) = do + maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId + pure $ traverse jobInfoFromJSRep maybeJobInfo + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: if success then 1 else 0 + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSStartJob = + { jobId :: String + , startedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = 
Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit + +deleteIncompleteJobs :: SQLite -> Effect Unit +deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl -type NewJob = +type InsertPackageJob = { jobId :: JobId , jobType :: JobType - , createdAt :: DateTime , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation } -type JSNewJob = +type JSInsertPackageJob = { jobId :: String , jobType :: String - , createdAt :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId +insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep { jobId, jobType, packageName, packageVersion, payload } = + { jobId: un JobId jobId , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt , packageName: PackageName.print packageName - , ref + , packageVersion: Version.print packageVersion + , payload: stringifyJson Operation.packageOperationCodec payload + } + +foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
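+-- | The operation payload is serialized to JSON text with Operation.packageOperationCodec before storage.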
+insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit +insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertPackageJobToJSRep + +type InsertMatrixJob = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep { jobId, packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } -type JobResult = +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit +insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep + +type InsertPackageSetJob = { jobId :: JobId - , finishedAt :: DateTime - , success :: Boolean + , payload :: PackageSetOperation } -type JSJobResult = +type JSInsertPackageSetJob = { jobId :: String - , finishedAt :: String - , success :: Int + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , payload: stringifyJson Operation.packageSetOperationCodec payload } -type Job = +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit +insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep + +type PackageJobDetails = { jobId :: JobId , jobType :: JobType , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation , createdAt :: DateTime - , finishedAt :: Maybe DateTime - , success :: Boolean + , startedAt :: Maybe DateTime } -type JSJob = +type JSPackageJobDetails = { jobId :: String , jobType :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String , createdAt :: String - , finishedAt :: Nullable String - , success :: Int + , startedAt :: Nullable String } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, jobType, packageName, 
packageVersion, payload, createdAt, startedAt } = do + ty <- API.V1.parseJobType jobType + name <- PackageName.parse packageName + version <- Version.parse packageVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { jobId: JobId jobId + , jobType: ty + , packageName: name + , packageVersion: version + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) + +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails + +type MatrixJobDetails = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSMatrixJobDetails = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +type PackageSetJobDetails = + { jobId :: JobId + , payload :: PackageSetOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSPackageSetJobDetails = + { jobId :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + pure + { jobId: JobId jobId + , payload: 
parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp + } -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
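+-- | The optional timestamp further restricts results to log lines at or after that time.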
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + (Nullable.toNullable timestamp) + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index 64e1bcc86..4d7113bb3 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -3,13 +3,22 @@ module Registry.App.Server where import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) +import Control.Parallel as Parallel import Data.Codec.JSON as CJ +import Data.DateTime (DateTime(..)) +import Data.DateTime as DateTime import Data.Formatter.DateTime as Formatter.DateTime +import Data.Lens (Lens') +import Data.Lens as Lens +import Data.Lens.Record as Lens.Record import Data.Newtype (unwrap) import Data.String as String +import Data.Time.Duration (Minutes(..)) import Data.UUID.Random as UUID +import Effect.Aff (Fiber, Milliseconds(..)) import Effect.Aff as Aff import Effect.Class.Console as Console +import Effect.Ref as Ref import Fetch.Retry as Fetch.Retry import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) import HTTPurple as HTTPurple @@ -43,7 +52,7 @@ import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, SQLite, PackageSetJobDetails) import Registry.App.SQLite as SQLite import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (GitHubToken, Octokit) @@ -56,40 +65,121 @@ import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +import Run.Except as Run.Except newJobId :: forall m. MonadEffect m => m JobId newJobId = liftEffect do id <- UUID.make pure $ JobId $ UUID.toString id +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = do + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = do + runEffects env Db.deleteIncompleteJobs >>= case _ of + Left err -> pure $ Left err + Right _ -> loop + where + loop = runEffects env findNextAvailableJob >>= case _ of + Left err -> + pure $ Left err + + Right Nothing -> do + Aff.delay (Milliseconds 100.0) + loop + + Right (Just job) -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. 
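+      -- (ParAff's Alternative runs both branches in parallel and keeps the first to complete, cancelling the other.)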
+      jobResult <- do
+        let execute = map Just (runEffects env (executeJob now job))
+        let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes
+        let timeout = Aff.delay (Milliseconds delay) $> Nothing
+        Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout
+
+      finishedAt <- nowUTC
+      finishResult <- runEffects env $ case jobResult of
+        Nothing -> do
+          Log.error $ "Job " <> un JobId jobId <> " timed out."
+          Db.finishJob { jobId, finishedAt, success: false }
+
+        Just (Left err) -> do
+          Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err
+          Db.finishJob { jobId, finishedAt, success: false }
+
+        Just (Right _) -> do
+          Log.info $ "Job " <> un JobId jobId <> " succeeded."
+          Db.finishJob { jobId, finishedAt, success: true }
+
+      case finishResult of
+        Left err -> pure $ Left err
+        Right _ -> loop
+
+executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit
+executeJob now = case _ of
+  PackageJob { jobId } -> do
+    Db.startJob { jobId, startedAt: now }
+    pure unit -- UNIMPLEMENTED
+  MatrixJob _details ->
+    pure unit -- UNIMPLEMENTED
+  PackageSetJob _details ->
+    pure unit -- UNIMPLEMENTED
+
+squashCommitRegistry :: Run ServerEffects Unit
+squashCommitRegistry = do
+  pure unit
+
 router :: ServerEnv -> Request Route -> Run ServerEffects Response
 router env { route, method, body } = HTTPurple.usingCont case route, method of
   Publish, Post -> do
-    publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body
-    lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish
-    forkPipelineJob publish.name publish.ref PublishJob \jobId -> do
-      Log.info $ "Received Publish request, job id: " <> unwrap jobId
-      API.publish Nothing publish
+    -- publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body
+    -- lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish
+    -- forkPipelineJob publish.name publish.ref PublishJob \jobId -> do
+    --   Log.info $ "Received Publish request, job id: " <> unwrap jobId
+    --   API.publish Nothing publish
+    HTTPurple.emptyResponse Status.ok
 
   Unpublish, Post -> do
-    auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body
-    case auth.payload of
-      Operation.Unpublish { name, version } -> do
-        forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do
-          Log.info $ "Received Unpublish request, job id: " <> unwrap jobId
-          API.authenticated auth
-      _ ->
-        HTTPurple.badRequest "Expected unpublish operation."
+    -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body
+    -- case auth.payload of
+    --   Operation.Unpublish { name, version } -> do
+    --     forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do
+    --       Log.info $ "Received Unpublish request, job id: " <> unwrap jobId
+    --       API.authenticated auth
+    --   _ ->
+    --     HTTPurple.badRequest "Expected unpublish operation."
+    HTTPurple.emptyResponse Status.ok
 
   Transfer, Post -> do
-    auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body
-    case auth.payload of
-      Operation.Transfer { name } -> do
-        forkPipelineJob name "" TransferJob \jobId -> do
-          Log.info $ "Received Transfer request, job id: " <> unwrap jobId
-          API.authenticated auth
-      _ ->
-        HTTPurple.badRequest "Expected transfer operation." 
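+    -- Placeholder response until the router creates jobs in the new queue (see the TODO list in SQLite.purs).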
+ HTTPurple.emptyResponse Status.ok + -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + -- case auth.payload of + -- Operation.Transfer { name } -> do + -- forkPipelineJob name "" TransferJob \jobId -> do + -- Log.info $ "Received Transfer request, job id: " <> unwrap jobId + -- API.authenticated auth + -- _ -> + -- HTTPurple.badRequest "Expected transfer operation." Jobs, Get -> do jsonOk (CJ.array V1.jobCodec) [] @@ -97,12 +187,17 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of + lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) + Right Nothing -> + HTTPurple.notFound + Right (Just job) -> do + HTTPurple.emptyResponse Status.ok + -- TODO: Return the job details (will need to update the jobCodec and move the various + -- details into the API module). + -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) Status, Get -> HTTPurple.emptyResponse Status.ok @@ -112,35 +207,34 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } + -- where + -- forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response + -- forkPipelineJob packageName ref jobType action = do + -- -- First thing we check if the package already has a pipeline in progress + -- lift (Db.runningJobForPackage packageName) >>= case _ of + -- -- If yes, we error out if it's the wrong kind, return it if it's the same type + -- Right { jobId, jobType: runningJobType } -> do + -- lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId + -- case runningJobType == jobType of + -- true -> jsonOk 
V1.jobCreatedResponseCodec { jobId } + -- false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName + -- -- otherwise spin up a new thread + -- Left _err -> do + -- lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" + -- jobId <- newJobId + -- now <- nowUTC + -- let newJob = { createdAt: now, jobId, jobType, packageName, ref } + -- lift $ Db.createJob newJob + -- let newEnv = env { jobId = Just jobId } + + -- _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do + -- result <- runEffects newEnv (action jobId) + -- case result of + -- Left _ -> pure unit + -- Right _ -> do + -- finishedAt <- nowUTC + -- void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) + -- jsonOk V1.jobCreatedResponseCodec { jobId } type ServerEnvVars = { token :: GitHubToken @@ -219,7 +313,11 @@ createServerEnv = do type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response runServer env router' request = do result <- runEffects env (router' env request) case result of diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..2b01deb0b --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,56 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Package-oriented jobs (publish/unpublish/transfer) +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs (one compiler, all packages) +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package set jobs +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE package_jobs; +DROP TABLE matrix_jobs; +DROP TABLE package_set_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..2ad866068 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,45 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..67216ca35 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -68,7 +68,6 @@ type Job = { jobId :: JobId , jobType :: JobType , packageName :: PackageName - , ref :: String , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -80,7 +79,6 @@ jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec , jobType: jobTypeCodec , packageName: PackageName.codec - , ref: CJ.string , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..521bc2883 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -23,6 +23,8 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec @@ -58,6 +60,18 @@ data PackageOperation derive instance Eq PackageOperation +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -178,6 +192,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. 
-- | -- | For full details, see the registry spec: From d8f2ce3de052b9de8acb10a137277d71d3af11bd Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sun, 8 Jun 2025 10:08:23 -0400 Subject: [PATCH 2/6] A shell script to run local server without wrapping it in Nix --- .gitignore | 4 + app/src/App/Server.purs | 1 + run-local-server.sh | 183 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 188 insertions(+) create mode 100755 run-local-server.sh diff --git a/.gitignore b/.gitignore index e0c3a931c..bdba77070 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ /output /scratch /.vscode +/.temp result @@ -18,3 +19,6 @@ result # Keep it secret, keep it safe. .env .envrc + +.claude +CLAUDE.md diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index 659b4ad8a..7e42001f6 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -152,6 +152,7 @@ type ServerEnvVars = readServerEnvVars :: Aff ServerEnvVars readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" Env.loadEnvFile ".env" token <- Env.lookupRequired Env.pacchettibottiToken publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub diff --git a/run-local-server.sh b/run-local-server.sh new file mode 100755 index 000000000..183fd6c3f --- /dev/null +++ b/run-local-server.sh @@ -0,0 +1,183 @@ +#!/usr/bin/env bash + +# Script to run the PureScript Registry server locally without VM. +# +# The script starts a bunch of external service mocks using WireMock, creates a +# Sqlite DB (if doesn't exist yet) and sets up overriding .env file pointing to +# those mock services and the DB. All of that is kept under `.temp/local-server/`. +# To reset the environment, nuke that directory. + +set -euo pipefail + +# Configuration +MOCK_GITHUB_PORT=9001 +MOCK_BUCKET_PORT=9002 +MOCK_S3_PORT=9003 +MOCK_PURSUIT_PORT=9004 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PIDS=() +CACHE_DIR="$SCRIPT_DIR/.temp/local-server" +mkdir -p "$CACHE_DIR" + +# Cleanup function +cleanup() { + for pid in "${PIDS[@]}"; do + if kill -0 "$pid" 2>/dev/null; then + kill "$pid" 2>/dev/null || true + fi + done + + echo "Killed all mock services." +} + +# Set up cleanup trap +trap cleanup EXIT INT TERM + +# Function to start a wiremock service +start_wiremock() { + local service_name=$1 + local port=$2 + local mappings=$3 + local service_dir="$CACHE_DIR/wiremock-$service_name" + mkdir -p "$service_dir/mappings" "$service_dir/__files" + echo > "$service_dir/mappings/mappings.json" "$mappings" + cp "$SCRIPT_DIR/app/fixtures/registry-storage"/*.tar.gz "$service_dir/__files/" 2>/dev/null || true + + # Start wiremock + nix run nixpkgs#wiremock -- \ + --port "$port" \ + --root-dir "$service_dir" \ + --disable-banner \ + --verbose & + + local pid=$! 
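+  # Record the background WireMock PID so cleanup() can terminate it on exit.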
+ PIDS+=("$pid") +} + +mkdir -p "$CACHE_DIR/repo-fixtures/purescript" +cp -r "$SCRIPT_DIR/app/fixtures/github-packages"/* "$CACHE_DIR/repo-fixtures/purescript/" 2>/dev/null || true +cp -r "$SCRIPT_DIR/app/fixtures/registry-index" "$CACHE_DIR/repo-fixtures/purescript/" 2>/dev/null || true +cp -r "$SCRIPT_DIR/app/fixtures/package-sets" "$CACHE_DIR/repo-fixtures/purescript/" 2>/dev/null || true + +start_wiremock "github-api" $MOCK_GITHUB_PORT ' +{ + "mappings": [ + { + "request": { + "method": "GET", + "url": "/repos/purescript/purescript-effect/contents/bower.json?ref=v4.0.0" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json" + }, + "jsonBody": { + "type": "file", + "encoding": "base64", + "content": "ewogICJuYW1lIjogInB1cmVzY3JpcHQtZWZmZWN0IiwKICAiaG9tZXBhZ2UiOiAiaHR0cHM6Ly9naXRodWIuY29tL3B1cmVzY3JpcHQvcHVyZXNjcmlwdC1lZmZlY3QiLAogICJsaWNlbnNlIjogIkJTRC0zLUNsYXVzZSIsCiAgInJlcG9zaXRvcnkiOiB7CiAgICAidHlwZSI6ICJnaXQiLAogICAgInVybCI6ICJodHRwczovL2dpdGh1Yi5jb20vcHVyZXNjcmlwdC9wdXJlc2NyaXB0LWVmZmVjdC5naXQiCiAgfSwKICAiaWdub3JlIjogWwogICAgIioqLy4qIiwKICAgICJib3dlcl9jb21wb25lbnRzIiwKICAgICJub2RlX21vZHVsZXMiLAogICAgIm91dHB1dCIsCiAgICAidGVzdCIsCiAgICAiYm93ZXIuanNvbiIsCiAgICAicGFja2FnZS5qc29uIgogIF0sCiAgImRlcGVuZGVuY2llcyI6IHsKICAgICJwdXJlc2NyaXB0LXByZWx1ZGUiOiAiXjYuMC4wIgogIH0KfQo=" + } + } + }, + { + "request": { + "method": "GET", + "url": "/repos/purescript/package-sets/tags" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json" + }, + "jsonBody": { + "name": "psc-0.15.10-20230105", + "commit": { + "sha": "090897c992b2b310b1456506308db789672adac1", + "url": "https://api.github.com/repos/purescript/package-sets/commits/090897c992b2b310b1456506308db789672adac1" + } + } + } + } + ] +}' + +start_wiremock "s3-api" $MOCK_S3_PORT ' +{ + "mappings": [ + { + "request": { + "method": "GET", + "url": "/prelude/6.0.1.tar.gz" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/octet-stream" + }, + "bodyFileName": "prelude-6.0.1.tar.gz" + } + } + ] +}' + +start_wiremock "bucket-api" $MOCK_BUCKET_PORT ' +{ + "mappings": [ + { + "request": { + "method": "GET", + "urlPattern": "/.*" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/xml" + }, + "body": "" + } + } + ] +}' + +start_wiremock "pursuit-api" $MOCK_PURSUIT_PORT ' +{ + "mappings": [ + { + "request": { + "method": "POST", + "urlPattern": "/packages.*" + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json" + }, + "jsonBody": { + "success": true + } + } + } + ] +}' + +if [ ! -f "$SCRIPT_DIR/.env" ]; then + cp "$SCRIPT_DIR/.env.example" "$SCRIPT_DIR/.env" +fi + +if [ ! -f "$CACHE_DIR/registry.sqlite3" ]; then + sqlite3 "$CACHE_DIR/registry.sqlite3" < "$SCRIPT_DIR/db/schema.sql" +fi + + cat > "$CACHE_DIR/.env.local" <<-END +DATABASE_URL="sqlite:$CACHE_DIR/registry.sqlite3" +DHALL_TYPES="$SCRIPT_DIR/types" +GITHUB_API_URL=http://localhost:$MOCK_GITHUB_PORT +S3_API_URL=http://localhost:$MOCK_S3_PORT +S3_BUCKET_URL=http://localhost:$MOCK_BUCKET_PORT +PURSUIT_API_URL=http://localhost:$MOCK_PURSUIT_PORT +END + +# Using a specific version of Spago until the new lockfile structure is +# supported by the PureScript Nix overlay. 
+npx --yes spago@0.93.19 run -p registry-app From fec4ceb684b24491e7483b459d65a3174796aa48 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sat, 21 Jun 2025 23:13:52 -0400 Subject: [PATCH 3/6] Split Server module into Env, Router, JobExecutor, and Main --- app/spago.yaml | 2 +- app/src/App/JobExecutor.purs | 89 +++++++ app/src/App/Main.purs | 88 +++++++ app/src/App/SQLite.js | 4 +- app/src/App/SQLite.purs | 29 +-- app/src/App/Server.purs | 446 --------------------------------- app/src/App/Server/Env.purs | 188 ++++++++++++++ app/src/App/Server/Router.purs | 84 +++++++ lib/src/API/V1.purs | 25 +- lib/src/JobType.purs | 26 ++ lib/src/Operation.purs | 17 +- 11 files changed, 505 insertions(+), 493 deletions(-) create mode 100644 app/src/App/JobExecutor.purs create mode 100644 app/src/App/Main.purs delete mode 100644 app/src/App/Server.purs create mode 100644 app/src/App/Server/Env.purs create mode 100644 app/src/App/Server/Router.purs create mode 100644 lib/src/JobType.purs diff --git a/app/spago.yaml b/app/spago.yaml index 3919e7bb2..135e766b4 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/JobExecutor.purs b/app/src/App/JobExecutor.purs new file mode 100644 index 000000000..0bd6fa44f --- /dev/null +++ b/app/src/App/JobExecutor.purs @@ -0,0 +1,89 @@ +module Registry.App.JobExecutor where + +import Registry.App.Prelude hiding ((/)) + +import Control.Parallel as Parallel +import Data.DateTime (DateTime) +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Registry.API.V1 (JobId(..)) +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) +import Registry.App.Server.Env (ServerEnv, ServerEffects, runEffects) +import Run (Run) +import Run.Except (EXCEPT) + +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = do + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = do + runEffects env Db.deleteIncompleteJobs >>= case _ of + Left err -> pure $ Left err + Right _ -> loop + where + loop = runEffects env findNextAvailableJob >>= case _ of + Left err -> + pure $ Left err + + Right Nothing -> do + Aff.delay (Milliseconds 100.0) + loop + + Right (Just job) -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. 
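+      -- The timeout branch resolves to Nothing and a completed run to Just,
+      -- so the case expression below can tell the two outcomes apart.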
+ jobResult <- do + let execute = Just <$> runEffects env (executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + finishResult <- runEffects env case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + Db.finishJob { jobId, finishedAt: now, success: true } + + case finishResult of + Left err -> pure $ Left err + Right _ -> loop + +executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob now = case _ of + PackageJob { jobId } -> do + Db.startJob { jobId, startedAt: now } + pure unit -- UNIMPLEMENTED + MatrixJob _details -> + pure unit -- UNIMPLEMENTED + PackageSetJob _details -> + pure unit -- UNIMPLEMENTED diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..f8bb129bd --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,88 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.String as String +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import HTTPurple (Request, Response) +import HTTPurple as HTTPurple +import Node.Process as Process +import Registry.API.V1 (Route) +import Registry.API.V1 as V1 +import Registry.App.Server.Env (ServerEnv, createServerEnv, runEffects) +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + _healthcheck <- Aff.launchAff do + let + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = + Fetch.Retry.withRetryRequest env.vars.resourceEnv.healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> + Console.error "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." 
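+            -- Only reachable for non-200 responses once the retry budget is
+            -- exhausted; the guarded case above handles non-200 while n >= 0.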
+ + loop limit + + _close <- HTTPurple.serve + { hostname: "0.0.0.0" + , port: 8080 + , onStarted + } + { route: V1.routes + , router: runServer env + } + pure unit + where + onStarted :: Effect Unit + onStarted = do + Console.log $ String.joinWith "\n" + [ " ┌───────────────────────────────────────────┐" + , " │ Server now up on port 8080 │" + , " │ │" + , " │ To test, run: │" + , " │ > curl -v localhost:8080/api/v1/jobs │" + , " └───────────────────────────────────────────┘" + ] + + runServer :: ServerEnv -> Request Route -> Aff Response + runServer env request = do + result <- runEffects env (Router.router env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index fa9a8b539..97521d202 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -67,12 +67,12 @@ const _insertJob = (db, table, columns, job) => { }; export const insertPackageJobImpl = (db, job) => { - const columns = [ 'jobId', 'jobType', 'packageName', 'packageVersion', 'payload' ] + const columns = [ 'jobId', 'jobType', 'payload' ] return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); }; export const insertMatrixJobImpl = (db, job) => { - const columns = [ 'jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload' ] + const columns = [ 'jobId', 'compilerVersion', 'payload' ] return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 8c117fda7..b56575757 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -59,10 +59,11 @@ import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Registry.API.V1 (JobId(..), LogLevel, LogLine) import Registry.API.V1 as API.V1 import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType import Registry.Operation (PackageOperation, PackageSetOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName @@ -175,26 +176,22 @@ deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl type InsertPackageJob = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , packageVersion :: Version , payload :: PackageOperation } type JSInsertPackageJob = { jobId :: String , jobType :: String - , packageName :: String - , packageVersion :: String , payload :: String } insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep { jobId, jobType, packageName, packageVersion, payload } = +insertPackageJobToJSRep { jobId, payload } = { jobId: un JobId jobId - , jobType: API.V1.printJobType jobType - , packageName: PackageName.print packageName - , packageVersion: Version.print packageVersion + , jobType: JobType.print case payload of + Operation.Publish _ -> JobType.PublishJob + Operation.Authenticated { payload: Operation.Unpublish _ } -> JobType.UnpublishJob + Operation.Authenticated { payload: Operation.Transfer _ } -> JobType.TransferJob , payload: stringifyJson Operation.packageOperationCodec payload } @@ -206,25 +203,19 @@ insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertP type InsertMatrixJob = { jobId :: JobId - , packageName :: PackageName - , packageVersion :: Version , compilerVersion :: 
Version , payload :: Map PackageName Version } type JSInsertMatrixJob = { jobId :: String - , packageName :: String - , packageVersion :: String , compilerVersion :: String , payload :: String } insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob -insertMatrixJobToJSRep { jobId, packageName, packageVersion, compilerVersion, payload } = +insertMatrixJobToJSRep { jobId, compilerVersion, payload } = { jobId: un JobId jobId - , packageName: PackageName.print packageName - , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } @@ -257,7 +248,7 @@ insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< i type PackageJobDetails = { jobId :: JobId - , jobType :: JobType + , jobType :: JobType.JobType , packageName :: PackageName , packageVersion :: Version , payload :: PackageOperation @@ -277,7 +268,7 @@ type JSPackageJobDetails = packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do - ty <- API.V1.parseJobType jobType + ty <- JobType.parse jobType name <- PackageName.parse packageName version <- Version.parse packageVersion created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index e1dd9b29c..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,446 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Control.Parallel as Parallel -import Data.Codec.JSON as CJ -import Data.DateTime (DateTime(..)) -import Data.DateTime as DateTime -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Lens (Lens') -import Data.Lens as Lens -import Data.Lens.Record as Lens.Record -import Data.Newtype (unwrap) -import Data.String as String -import Data.Time.Duration (Minutes(..)) -import Data.UUID.Random as UUID -import Effect.Aff (Fiber, Milliseconds(..)) -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Effect.Ref as Ref -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as 
Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, SQLite, PackageSetJobDetails) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except -import Run.Except as Run.Except - -newJobId :: forall m. MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -data JobDetails - = PackageJob PackageJobDetails - | MatrixJob MatrixJobDetails - | PackageSetJob PackageSetJobDetails - -findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = do - Db.selectNextPackageJob >>= case _ of - Just job -> pure $ Just $ PackageJob job - Nothing -> Db.selectNextMatrixJob >>= case _ of - Just job -> pure $ Just $ MatrixJob job - Nothing -> Db.selectNextPackageSetJob >>= case _ of - Just job -> pure $ Just $ PackageSetJob job - Nothing -> pure Nothing - -runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) -runJobExecutor env = do - runEffects env Db.deleteIncompleteJobs >>= case _ of - Left err -> pure $ Left err - Right _ -> loop - where - loop = runEffects env findNextAvailableJob >>= case _ of - Left err -> - pure $ Left err - - Right Nothing -> do - Aff.delay (Milliseconds 100.0) - loop - - Right (Just job) -> do - now <- nowUTC - - let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId - - -- We race the job execution against a timeout; if the timeout happens first, - -- we kill the job and move on to the next one. - jobResult <- do - let execute = map Just (runEffects env (executeJob now job)) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing - Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout - - finishResult <- runEffects env $ case jobResult of - Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." - Db.finishJob { jobId, finishedAt: now, success: false } - - Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err - Db.finishJob { jobId, finishedAt: now, success: false } - - Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." 
- Db.finishJob { jobId, finishedAt: now, success: true } - - case finishResult of - Left err -> pure $ Left err - Right _ -> loop - -executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit -executeJob now = case _ of - PackageJob { jobId } -> do - Db.startJob { jobId, startedAt: now } - pure unit -- UNIMPLEMENTED - MatrixJob _details -> - pure unit -- UNIMPLEMENTED - PackageSetJob _details -> - pure unit -- UNIMPLEMENTED - -squashCommitRegistry :: Run ServerEffects Unit -squashCommitRegistry = do - pure unit - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - -- publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - -- lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - -- forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - -- Log.info $ "Received Publish request, job id: " <> unwrap jobId - -- API.publish Nothing publish - HTTPurple.emptyResponse Status.ok - - Unpublish, Post -> do - -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - -- case auth.payload of - -- Operation.Unpublish { name, version } -> do - -- forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - -- Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - -- API.authenticated auth - -- _ -> - -- HTTPurple.badRequest "Expected unpublish operation." - HTTPurple.emptyResponse Status.ok - - Transfer, Post -> do - HTTPurple.emptyResponse Status.ok - -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - -- case auth.payload of - -- Operation.Transfer { name } -> do - -- forkPipelineJob name "" TransferJob \jobId -> do - -- Log.info $ "Received Transfer request, job id: " <> unwrap jobId - -- API.authenticated auth - -- _ -> - -- HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right Nothing -> - HTTPurple.notFound - Right (Just job) -> do - HTTPurple.emptyResponse Status.ok - -- TODO: Return the job details (will need to update the jobCodec and move the various - -- details into the API module). 
- -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - -- where - -- forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - -- forkPipelineJob packageName ref jobType action = do - -- -- First thing we check if the package already has a pipeline in progress - -- lift (Db.runningJobForPackage packageName) >>= case _ of - -- -- If yes, we error out if it's the wrong kind, return it if it's the same type - -- Right { jobId, jobType: runningJobType } -> do - -- lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - -- case runningJobType == jobType of - -- true -> jsonOk V1.jobCreatedResponseCodec { jobId } - -- false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- -- otherwise spin up a new thread - -- Left _err -> do - -- lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - -- jobId <- newJobId - -- now <- nowUTC - -- let newJob = { createdAt: now, jobId, jobType, packageName, ref } - -- lift $ Db.createJob newJob - -- let newEnv = env { jobId = Just jobId } - - -- _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - -- result <- runEffects newEnv (action jobId) - -- case result of - -- Left _ -> pure unit - -- Right _ -> do - -- finishedAt <- nowUTC - -- void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - -- jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".temp/local-server/.env.local" - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. 
- -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer - :: ServerEnv - -> (ServerEnv -> Request Route -> Run ServerEffects Response) - -> Request Route - -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest env.vars.resourceEnv.healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." - Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port: 8080 - , onStarted - } - { route: V1.routes - , router: runServer env router - } - pure unit - where - onStarted :: Effect Unit - onStarted = do - Console.log $ String.joinWith "\n" - [ " ┌───────────────────────────────────────────┐" - , " │ Server now up on port 8080 │" - , " │ │" - , " │ To test, run: │" - , " │ > curl -v localhost:8080/api/v1/jobs │" - , " └───────────────────────────────────────────┘" - ] - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..1f6fdc489 --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 +1,188 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Comment (COMMENT) +import Registry.App.Effect.Comment as Comment +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import 
Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.deleteIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. 
MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Comment.interpret Comment.handleLog + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs new file mode 100644 index 000000000..27af29a24 --- /dev/null +++ b/app/src/App/Server/Router.purs @@ -0,0 +1,84 @@ +module Registry.App.Server.Router where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Cont (ContT) +import Data.Codec.JSON as CJ +import Data.UUID.Random as UUID +import HTTPurple (Method(..), Request, Response) +import HTTPurple as HTTPurple +import HTTPurple.Status as Status +import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk) +import Registry.Operation (PackageOperation) +import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Run (Run) +import Run.Except as Run.Except + +router :: ServerEnv -> Request Route -> Run ServerEffects Response +router env { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + forkPackageJob $ Operation.Publish publish + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ 
"Received Unpublish request: " <> printJson Operation.unpublishCodec payload + forkPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + forkPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected transfer operation." + + Jobs, Get -> do + jsonOk (CJ.array V1.jobCodec) [] + + Job jobId { level: maybeLogLevel, since }, Get -> do + let logLevel = fromMaybe Error maybeLogLevel + logs <- lift $ Db.selectLogsByJob jobId logLevel since + lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.notFound + Right Nothing -> + HTTPurple.notFound + Right (Just job) -> do + HTTPurple.emptyResponse Status.ok + -- TODO: Return the job details (will need to update the jobCodec and move the various + -- details into the API module). + -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound + where + forkPackageJob :: PackageOperation -> ContT Response (Run _) Response + forkPackageJob operation = do + lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) + jobId <- newJobId + lift $ Db.insertPackageJob { jobId, payload: operation } + jsonOk V1.jobCreatedResponseCodec { jobId } + + newJobId :: forall m. MonadEffect m => m JobId + newJobId = liftEffect do + id <- UUID.make + pure $ JobId $ UUID.toString id diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 67216ca35..31c15866c 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,6 +15,7 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') @@ -66,7 +67,7 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType + , jobType :: JobType.JobType , packageName :: PackageName , createdAt :: DateTime , finishedAt :: Maybe DateTime @@ -77,7 +78,7 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: JobType.codec , packageName: PackageName.codec , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime @@ -92,26 +93,6 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob - -derive instance Eq JobType - -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - -printJobType :: JobType -> String -printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum 
printJobType (hush <<< parseJobType) - type LogLine = { level :: LogLevel , message :: String diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs new file mode 100644 index 000000000..b8dceaf38 --- /dev/null +++ b/lib/src/JobType.purs @@ -0,0 +1,26 @@ +module Registry.JobType where + +import Prelude +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Sum as CJ.Sum +import Data.Either (Either(..), hush) + +data JobType = PublishJob | UnpublishJob | TransferJob + +derive instance Eq JobType + +parse :: String -> Either String JobType +parse = case _ of + "publish" -> Right PublishJob + "unpublish" -> Right UnpublishJob + "transfer" -> Right TransferJob + j -> Left $ "Invalid job type " <> show j + +print :: JobType -> String +print = case _ of + PublishJob -> "publish" + UnpublishJob -> "unpublish" + TransferJob -> "transfer" + +codec :: CJ.Codec JobType +codec = CJ.Sum.enumSum print (hush <<< parse) diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 521bc2883..518c1a6de 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,8 +14,8 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) , PackageSetUpdateData @@ -23,13 +23,15 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageName , packageOperationCodec , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec , unpublishCodec - ) where + ) + where import Prelude @@ -60,6 +62,13 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + -- | A codec for encoding and decoding a `PackageOperation` as JSON. 
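+-- | A package operation is either a direct publish or an authenticated
+-- | unpublish/transfer envelope, and the codec below handles both shapes.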
packageOperationCodec :: CJ.Codec PackageOperation packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode @@ -88,6 +97,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -98,6 +108,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } From 8438df7854177740df5a1dcb077216567d618654 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Wed, 25 Jun 2025 20:04:48 -0400 Subject: [PATCH 4/6] Fix up build --- app/src/App/JobExecutor.purs | 76 ++-- app/test/App/API.purs | 3 + app/test/App/GitHubIssue.purs | 3 + package-lock.json | 611 +++++++++++++++++++++++++++++++- package.json | 5 +- scripts/src/LegacyImporter.purs | 1 + scripts/src/PackageDeleter.purs | 1 + 7 files changed, 659 insertions(+), 41 deletions(-) diff --git a/app/src/App/JobExecutor.purs b/app/src/App/JobExecutor.purs index 0bd6fa44f..e5d29bd95 100644 --- a/app/src/App/JobExecutor.purs +++ b/app/src/App/JobExecutor.purs @@ -11,7 +11,7 @@ import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) -import Registry.App.Server.Env (ServerEnv, ServerEffects, runEffects) +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Run (Run) import Run.Except (EXCEPT) @@ -31,57 +31,55 @@ findNextAvailableJob = do Nothing -> pure Nothing runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) -runJobExecutor env = do - runEffects env Db.deleteIncompleteJobs >>= case _ of - Left err -> pure $ Left err - Right _ -> loop +runJobExecutor env = runEffects env do + Db.deleteIncompleteJobs + loop where - loop = runEffects env findNextAvailableJob >>= case _ of - Left err -> - pure $ Left err + loop = do + mJob <- findNextAvailableJob + case mJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 100.0) + loop - Right Nothing -> do - Aff.delay (Milliseconds 100.0) - loop + Just job -> do + now <- nowUTC - Right (Just job) -> do - now <- nowUTC + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId - let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId + Db.startJob { jobId, startedAt: now } - -- We race the job execution against a timeout; if the timeout happens first, - -- we kill the job and move on to the next one. - jobResult <- do - let execute = Just <$> runEffects env (executeJob now job) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing - Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. 
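+        -- runEffects wraps execution in Aff.attempt, so a failed job shows up
+        -- as Just (Left err) below instead of crashing the executor loop.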
+ jobResult <- liftAff do + let execute = Just <$> (runEffects env $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout - finishResult <- runEffects env case jobResult of - Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." - Db.finishJob { jobId, finishedAt: now, success: false } + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." + pure false - Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err - Db.finishJob { jobId, finishedAt: now, success: false } + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + pure false - Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." - Db.finishJob { jobId, finishedAt: now, success: true } + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + pure true - case finishResult of - Left err -> pure $ Left err - Right _ -> loop + Db.finishJob { jobId, finishedAt: now, success } + loop executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob now = case _ of PackageJob { jobId } -> do - Db.startJob { jobId, startedAt: now } pure unit -- UNIMPLEMENTED MatrixJob _details -> pure unit -- UNIMPLEMENTED diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 3a8c66ef2..7d1861052 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,6 +96,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version , resolutions: Nothing } @@ -158,6 +159,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> @@ -172,6 +174,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..8276bf708 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , 
resolutions: Nothing diff --git a/package-lock.json b/package-lock.json index 2e810e01b..7b5e827c8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,10 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } }, "app": { "name": "registry-app", @@ -1598,6 +1601,12 @@ "node": ">=14.0.0" } }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -1653,6 +1662,15 @@ "prebuild-install": "^7.1.1" } }, + "node_modules/big-integer": { + "version": "1.6.52", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", + "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", + "license": "Unlicense", + "engines": { + "node": ">=0.6" + } + }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1681,6 +1699,18 @@ "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" }, + "node_modules/bplist-parser": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", + "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", + "license": "MIT", + "dependencies": { + "big-integer": "^1.6.44" + }, + "engines": { + "node": ">= 5.10.0" + } + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1733,6 +1763,21 @@ "node": ">=10.0.0" } }, + "node_modules/bundle-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "license": "MIT", + "dependencies": { + "run-applescript": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -1760,6 +1805,20 @@ "node": ">=10.0.0" } }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -1782,6 +1841,52 @@ "node": ">=4.0.0" } }, + "node_modules/default-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "license": "MIT", + "dependencies": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": 
"^7.1.1", + "titleize": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "license": "MIT", + "dependencies": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", @@ -1803,6 +1908,50 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "license": "BSD-2-Clause", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", + "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -1924,6 +2073,18 @@ "node": ">=10" } }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -1964,6 +2125,15 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, + 
"node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=14.18.0" + } + }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -2002,6 +2172,21 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -2021,6 +2206,24 @@ "node": ">=0.10.0" } }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -2037,6 +2240,51 @@ "node": ">=0.10.0" } }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-wsl/node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -2056,6 +2304,15 @@ "jsonrepair": "bin/cli.js" } }, + 
"node_modules/linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "license": "MIT", + "dependencies": { + "uc.micro": "^1.0.1" + } + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -2067,6 +2324,34 @@ "node": ">=10" } }, + "node_modules/markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", + "license": "MIT" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "license": "MIT" + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -2087,6 +2372,18 @@ "node": ">=8.6" } }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -2205,6 +2502,33 @@ } } }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -2213,6 +2537,39 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", + "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", + "license": "MIT", + "dependencies": { + "default-browser": "^4.0.0", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", @@ -2221,6 +2578,15 @@ "node": ">=0.10.0" } }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2266,6 +2632,15 @@ "once": "^1.3.1" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -2347,6 +2722,110 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/run-applescript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", + "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", + "license": "MIT", + "dependencies": { + "execa": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/run-applescript/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/run-applescript/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/run-applescript/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/run-applescript/node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -2407,6 +2886,33 @@ "node": ">=10" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -2450,6 +2956,34 @@ "simple-concat": "^1.0.0" } }, + "node_modules/spago": { + "version": "0.93.19", + "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz", + "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==", + "license": "BSD-3-Clause", + "dependencies": { + "better-sqlite3": "^8.6.0", + "env-paths": "^3.0.0", + "fast-glob": "^3.2.11", + "fs-extra": "^10.0.0", + "fuse.js": "^6.5.3", + "glob": "^7.1.6", + "markdown-it": "^12.0.4", + "open": "^9.1.0", + "punycode": "^2.3.0", + "semver": "^7.3.5", + "spdx-expression-parse": "^3.0.1", + "ssh2": "^1.14.0", + "supports-color": "^9.2.3", + "tar": "^6.1.11", + "tmp": "^0.2.1", + 
"xhr2": "^0.2.1", + "yaml": "^2.1.1" + }, + "bin": { + "spago": "bin/bundle.js" + } + }, "node_modules/spdx-exceptions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", @@ -2494,6 +3028,18 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -2507,6 +3053,18 @@ "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" }, + "node_modules/supports-color": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz", + "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", @@ -2554,6 +3112,18 @@ "node": ">=6" } }, + "node_modules/titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tmp": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", @@ -2602,6 +3172,12 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "license": "MIT" + }, "node_modules/universal-user-agent": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", @@ -2615,6 +3191,15 @@ "node": ">= 10.0.0" } }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -2642,11 +3227,35 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + 
"node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "node_modules/xhr2": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz", + "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index 76bc4e96e..5066e42c0 100644 --- a/package.json +++ b/package.json @@ -6,5 +6,8 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } } diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index d642d41dc..0fdc94a06 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -471,6 +471,7 @@ runLegacyImport logs = do { name: manifest.name , location: Just manifest.location , ref + , version: manifest.version , compiler , resolutions: Just resolutions } diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index f0cb1c63f..db9b54d23 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -243,6 +243,7 @@ deleteVersion arguments name version = do { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref + , version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } From 20416f0331eac85c5eb2462a7b7b7a8b19ad7ae2 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Tue, 2 Dec 2025 17:19:43 -0500 Subject: [PATCH 5/6] Overhaul and modernize Nix flake setup (#706) * add envrc for direnv users this was already used by several developers, this just makes it official * replace run-tests-script with nix flake check this also allows us to remove our single 'spago test' invocation in a github workflow, so all CI checks are in the Nix checks for Garnix to execute while i was at it, i removed the nix flakes wrapper, shell.nix, and flake-compat inclusions as at this point pretty much everyone is on a flake-compatible version of Nix * extract wiremock mappings * migrate to buildNpmPackage from slimlock slimlock was the homegrown npm dependency builder from ps-overlay from before when buildNpmPackage was a suitable choice for the registry, but at this point it's well-suited to be the standard choice for us. * overhaul nix flake setup our homegrown nix setup has diverged quite a bit from modern best practices and is hard to read. it also included some unnecessary helpers (like run-vm to work on a local vm) which we can remove. this change overhauls the full setup while maintaining the existing tests to make it simpler, more performant, and more usable for contributors. the deployments are still simple ('colmena apply'). 
--- .envrc | 1 + .github/workflows/main.yml | 32 - .gitignore | 1 - CONTRIBUTING.md | 23 +- app/default.nix | 160 ----- flake.lock | 68 +- flake.nix | 871 +++--------------------- nix/lib/buildRegistryPackage.nix | 83 +++ nix/lib/default.nix | 5 + nix/lib/parseEnv.nix | 19 + nix/overlays/default.nix | 13 + nix/overlays/registry.nix | 211 ++++++ nix/{module.nix => registry-server.nix} | 123 ++-- nix/test-vm.nix | 37 - nix/test/git-mock.mjs | 53 ++ nix/test/integration.nix | 394 +++++++++++ nix/wiremock.nix | 114 ---- scripts/default.nix | 82 --- shell.nix | 11 - 19 files changed, 952 insertions(+), 1349 deletions(-) create mode 100644 .envrc delete mode 100644 .github/workflows/main.yml delete mode 100644 app/default.nix create mode 100644 nix/lib/buildRegistryPackage.nix create mode 100644 nix/lib/default.nix create mode 100644 nix/lib/parseEnv.nix create mode 100644 nix/overlays/default.nix create mode 100644 nix/overlays/registry.nix rename nix/{module.nix => registry-server.nix} (54%) delete mode 100644 nix/test-vm.nix create mode 100644 nix/test/git-mock.mjs create mode 100644 nix/test/integration.nix delete mode 100644 nix/wiremock.nix delete mode 100644 scripts/default.nix delete mode 100644 shell.nix diff --git a/.envrc b/.envrc new file mode 100644 index 000000000..3550a30f2 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 1305e5bf9..000000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: tests - -on: - push: - branches: [master] - pull_request: - -jobs: - tests: - runs-on: ubuntu-latest - steps: - # Setup - - name: Check out source repository - uses: actions/checkout@v2 - - - name: Install Nix - uses: DeterminateSystems/nix-installer-action@v4 - - - name: Setup Nix cache - uses: DeterminateSystems/magic-nix-cache-action@v2 - - # Separates all the cache downloading time from the time required to actually run a step - # of the workflow - - name: Initialize Nix - run: nix develop - - # Unfortunately I can't run 'spago test' in a derivation because it doesn't - # have a mode that ignores the cache. So we run it in a script instead. - # Once we can make this a normal derivation then we can delete this - # workflow file altogether. - - name: Run Spago tests - run: nix develop --command run-tests-script diff --git a/.gitignore b/.gitignore index 0c3248c1d..9a52b9e8f 100644 --- a/.gitignore +++ b/.gitignore @@ -19,4 +19,3 @@ result # Keep it secret, keep it safe. .env -.envrc diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0651395fe..36493fe4d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,9 +39,23 @@ There are three more directories containing code for the registry. Finally, the `flake.nix` file orchestrates builds for the whole repository. +## Running the Registry Server Locally + +The registry server requires a `.env` file and an initialized database. To run the server for development: + +```sh +# 1. Ensure database is initialized (only needed once) +dbmate up + +# 2. Run the server (from the nix shell) +cd app && spago run +``` + +The server will load environment variables from the `.env` file in the project root and run on port 8080 by default. 
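+
+If you do not yet have a `.env` file, a reasonable starting point is to copy the example file from the repository root and fill in credentials as needed (the Nix flake reads its defaults from this same example file):
+
+```sh
+cp .env.example .env
+```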
+ ## Available Nix Commands -The Registry server can be run locally: +You can also run the packaged registry server: ```sh nix run .#server @@ -81,13 +95,6 @@ There is an integration test that will deploy the registry server and make reque nix build .#checks.x86_64-linux.integration ``` -You can "deploy" the registry server to a local VM and manually hit the API as if it were the production server: - -```sh -# The server will be available at localhost:8080 -nix run -``` - ### Testing Guidelines The PureScript code in the registry is well-tested, ranging from tests for individual functions to full end-to-end tests for the registry server running in a NixOS machine configured the same way as the deployed machine. The smaller and more pure the test, the easier it is to write and maintain; most code is tested via unit tests written with `spec`, and only the core pipelines are run in the integration test. diff --git a/app/default.nix b/app/default.nix deleted file mode 100644 index 2c757bcd8..000000000 --- a/app/default.nix +++ /dev/null @@ -1,160 +0,0 @@ -{ - makeWrapper, - lib, - stdenv, - purs-backend-es-unstable, - esbuild, - writeText, - nodejs, - compilers, - purs-versions, - dhall, - dhall-json, - git, - git-lfs, - licensee, - coreutils, - gzip, - gnutar, - # from the registry at the top level - spago-lock, - package-lock, -}: -let - # Since both the importer and the server share the same build process, we - # don't need to build them twice separately and can share an optimized output - # directory. - shared = stdenv.mkDerivation { - name = "registry-app-shared"; - src = ./.; - phases = [ - "buildPhase" - "installPhase" - ]; - nativeBuildInputs = [ purs-backend-es-unstable ]; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - ln -s ${spago-lock}/output . - echo "Optimizing with purs-backend-es..." - purs-backend-es build - ''; - installPhase = '' - mkdir $out; - cp -r output-es $out/output; - # This for loop exists because purs-backend-es finds the corefn.json files - # just fine, but doesn't find the foreign.js files. - # I suspect this is because of a quirky interaction between Nix and `copyFile`, - # but I'm not sure how to fix it so we work around it by copying the foreign - # modules by hand. - for dir in output/*/; do - subdir=$(basename "$dir") - if [ -f "output/$subdir/foreign.js" ]; then - cp "output/$subdir/foreign.js" "$out/output/$subdir/" || true; - fi - done - ''; - }; -in -{ - server = stdenv.mkDerivation rec { - name = "registry-server"; - src = ./.; - database = ../db; - nativeBuildInputs = [ - esbuild - makeWrapper - ]; - buildInputs = [ nodejs ]; - entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.App.Server"; - main(); - ''; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - cp -r ${shared}/output . - cp ${entrypoint} entrypoint.js - esbuild entrypoint.js --bundle --outfile=${name}.js --platform=node --packages=external - ''; - installPhase = '' - mkdir -p $out/bin - - echo "Copying files..." - cp ${name}.js $out/${name}.js - ln -s ${package-lock}/js/node_modules $out - - echo "Copying database..." - cp -r ${database} $out/bin/db - - echo "Creating node script..." 
- echo '#!/usr/bin/env sh' > $out/bin/${name} - echo 'exec ${nodejs}/bin/node '"$out/${name}.js"' "$@"' >> $out/bin/${name} - chmod +x $out/bin/${name} - ''; - postFixup = '' - wrapProgram $out/bin/${name} \ - --set PATH ${ - lib.makeBinPath [ - compilers - purs-versions - dhall - dhall-json - licensee - git - git-lfs - coreutils - gzip - gnutar - ] - } \ - ''; - }; - - github-importer = stdenv.mkDerivation rec { - name = "registry-github-importer"; - src = ./.; - nativeBuildInputs = [ - esbuild - makeWrapper - ]; - buildInputs = [ nodejs ]; - entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.App.GitHubIssue"; - main(); - ''; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - cp -r ${shared}/output . - cp ${entrypoint} entrypoint.js - esbuild entrypoint.js --bundle --outfile=${name}.js --platform=node --packages=external - ''; - installPhase = '' - mkdir -p $out/bin $out - - echo "Copying files..." - cp ${name}.js $out/${name}.js - ln -s ${package-lock}/js/node_modules $out - - echo "Creating node script..." - echo '#!/usr/bin/env sh' > $out/bin/${name} - echo 'exec ${nodejs}/bin/node '"$out/${name}.js"' "$@"' >> $out/bin/${name} - chmod +x $out/bin/${name} - ''; - postFixup = '' - wrapProgram $out/bin/${name} \ - --set PATH ${ - lib.makeBinPath [ - compilers - purs-versions - dhall - dhall-json - licensee - git - git-lfs - coreutils - gzip - gnutar - ] - } \ - ''; - }; -} diff --git a/flake.lock b/flake.lock index 71e823ad4..ad7901f70 100644 --- a/flake.lock +++ b/flake.lock @@ -1,22 +1,6 @@ { "nodes": { "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1747046372, - "narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-compat_2": { "flake": false, "locked": { "lastModified": 1696426674, @@ -62,11 +46,11 @@ "registry-index": "registry-index" }, "locked": { - "lastModified": 1749349364, - "narHash": "sha256-2/oMkW1ORJdVa3cMeRfoQj/hR4FfZMs79Jn/h5UrQx8=", + "lastModified": 1764469420, + "narHash": "sha256-ASzYEhZ4F8O+hhbYFmgGnAjlIE1xXDvzwv5ifHglU9c=", "owner": "jeslie0", "repo": "mkSpagoDerivation", - "rev": "fc16120512aaccb9950d7a8acc9198c89c9b2315", + "rev": "24f0b27ca00ac02c86e7a9d2d2115edd356006f3", "type": "github" }, "original": { @@ -77,23 +61,23 @@ }, "nixpkgs": { "locked": { - "lastModified": 1749640896, - "narHash": "sha256-oVVwB+4NH7aGysjLtsxsOPspUpDsa5nSH1G8CtD6fK4=", + "lastModified": 1764638368, + "narHash": "sha256-ln1kqV0B2epgFWUeCy+wupfVSFlpMZF8uu9nuXh8C7c=", "owner": "nixos", "repo": "nixpkgs", - "rev": "bf8a7649aaf6567c0c893db016956f7333a50c38", + "rev": "82b75b674b7263dc71ff8c6f5d2ea70686d22b7e", "type": "github" }, "original": { "owner": "nixos", - "ref": "release-25.05", + "ref": "release-25.11", "repo": "nixpkgs", "type": "github" } }, "purescript-overlay": { "inputs": { - "flake-compat": "flake-compat_2", + "flake-compat": "flake-compat", "nixpkgs": [ "nixpkgs" ], @@ -116,11 +100,11 @@ "registry": { "flake": false, "locked": { - "lastModified": 1747985987, - "narHash": "sha256-NHkksW17JaoiMpexAEJ9EQEygFKuv70CooZlsE7/OPs=", + "lastModified": 1763538112, + "narHash": "sha256-/+ug37TQbiEi9w8Lv+f55IE4i0LvBT7yAeff7Fno3mw=", "owner": "purescript", "repo": "registry", - "rev": "33e4d13c6dfbc908c24dffa35f5e28585a383cd7", + "rev": "b9313da46132ce93f2255ba31708633eef19f5ac", 
"type": "github" }, "original": { @@ -132,11 +116,11 @@ "registry-index": { "flake": false, "locked": { - "lastModified": 1747925902, - "narHash": "sha256-0eIDKoKhx27wDydfYpxp5rD7UjX0YmX/10I0SMi/vnY=", + "lastModified": 1763537903, + "narHash": "sha256-nHQxIUXFcpnb9V6+eLlyoGE1+Lq/mVO8yWW3bdgV+g0=", "owner": "purescript", "repo": "registry-index", - "rev": "8898112c4cc9d275503a416f9a7ff07b9c675339", + "rev": "d59cc60a616067bba7b39e71de054f596e5c28cd", "type": "github" }, "original": { @@ -147,12 +131,10 @@ }, "root": { "inputs": { - "flake-compat": "flake-compat", "flake-utils": "flake-utils", "mkSpagoDerivation": "mkSpagoDerivation", "nixpkgs": "nixpkgs", - "purescript-overlay": "purescript-overlay", - "slimlock": "slimlock_2" + "purescript-overlay": "purescript-overlay" } }, "slimlock": { @@ -176,26 +158,6 @@ "type": "github" } }, - "slimlock_2": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1746029857, - "narHash": "sha256-431slzM10HQixP4oQlCwGxUPD8wo4DWVGnIcttqyeEs=", - "owner": "thomashoneyman", - "repo": "slimlock", - "rev": "c49740738a026a00ab6be19300e8cf7b6de03fd7", - "type": "github" - }, - "original": { - "owner": "thomashoneyman", - "repo": "slimlock", - "type": "github" - } - }, "systems": { "locked": { "lastModified": 1681028828, diff --git a/flake.nix b/flake.nix index ea77ce3fd..a6bfe7652 100644 --- a/flake.nix +++ b/flake.nix @@ -2,21 +2,15 @@ description = "The PureScript Registry"; inputs = { - nixpkgs.url = "github:nixos/nixpkgs/release-25.05"; + nixpkgs.url = "github:nixos/nixpkgs/release-25.11"; flake-utils.url = "github:numtide/flake-utils"; - flake-compat.url = "github:edolstra/flake-compat"; - flake-compat.flake = false; - purescript-overlay.url = "github:thomashoneyman/purescript-overlay"; purescript-overlay.inputs.nixpkgs.follows = "nixpkgs"; mkSpagoDerivation.url = "github:jeslie0/mkSpagoDerivation"; mkSpagoDerivation.inputs.nixpkgs.follows = "nixpkgs"; mkSpagoDerivation.inputs.ps-overlay.follows = "purescript-overlay"; - - slimlock.url = "github:thomashoneyman/slimlock"; - slimlock.inputs.nixpkgs.follows = "nixpkgs"; }; outputs = @@ -25,7 +19,6 @@ flake-utils, purescript-overlay, mkSpagoDerivation, - slimlock, ... }: let @@ -41,23 +34,23 @@ pureScriptFileset = fileset.intersection (fileset.gitTracked ./.) ( fileset.unions [ ./app - (fileset.maybeMissing ./check) ./foreign ./lib ./scripts ./test-utils + (fileset.maybeMissing ./check) ./spago.lock ./spago.yaml + ./types ] ); npmFileset = fileset.intersection (fileset.gitTracked ./.) ( - fileset.fileFilter (file: file.name == "package.json" || file.name == "package-lock.json") ./. + fileset.fileFilter (f: f.name == "package.json" || f.name == "package-lock.json") ./. ); - # We can't import from remote urls in dhall when running in CI or other - # network-restricted environments, so we fetch the repository and use the - # local path instead. + DHALL_TYPES = ./types; + GIT_LFS_SKIP_SMUDGE = 1; DHALL_PRELUDE = "${ builtins.fetchGit { url = "https://github.com/dhall-lang/dhall-lang"; @@ -65,335 +58,97 @@ } }/Prelude/package.dhall"; - # The location of the Dhall type specifications, used to type-check manifests. - DHALL_TYPES = ./types; - - # We disable git-lfs files explicitly, as this is intended for large files - # (typically >4GB), and source packgaes really ought not be shipping large - # files — just source code. 
- GIT_LFS_SKIP_SMUDGE = 1; - registryOverlay = final: prev: rec { - nodejs = prev.nodejs_20; - - # We don't want to force everyone to update their configs if they aren't - # normally on flakes. - nixFlakes = prev.writeShellScriptBin "nixFlakes" '' - exec ${prev.nixVersions.stable}/bin/nix --experimental-features "nix-command flakes" "$@" - ''; - - # Detects arguments to 'git' containing a URL and replaces them with a - # local filepath. This is a drop-in replacement for 'git' that should be - # used in offline / test environments when we only want fixture data. - gitMock = - let - nodeScript = - script: - prev.writeScript "node-cmd" '' - ${nodejs}/bin/node -e "${script}" "$@" - ''; - - mock = nodeScript '' - const { URL } = require('url'); - const { spawn } = require('child_process'); - - const repoFixturesDir = process.env.REPO_FIXTURES_DIR; - if (!repoFixturesDir) { - throw new Error('REPO_FIXTURES_DIR is not set, but is required.'); - } - - // Replace any URL arguments with the local fixtures path. - function replaceIfUrl(arg) { - try { - const url = new URL(arg); - const path = url.pathname.replace(/\.git$/, '''); - const file = 'file://' + repoFixturesDir + path; - console.log(file); - return file; - } catch (e) { - // Not a URL, ignore - } - return arg; - } - - const args = process.argv.slice(1); - const modified = []; - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - modified.push(replaceIfUrl(arg)); - } - - const git = spawn('${prev.git}/bin/git', modified); - - git.stdout.on('data', (data) => { - console.log(data.toString('utf8')); - }); - - git.stderr.on('data', (data) => { - console.error(data.toString('utf8')); - }); - - git.on('close', (code) => { - if (code !== 0) { - throw new Error('git exited with code ' + code); - } - }); - ''; - in - prev.writeShellScriptBin "git" '' - exec ${mock} "$@" - ''; - - # Packages associated with the registry, ie. in this repository. - registry = - let - spago-lock = prev.mkSpagoDerivation { - name = "registry"; - src = ./.; - nativeBuildInputs = [ - prev.pkgs.spago-bin.spago-0_93_44 - prev.pkgs.purescript - ]; - buildPhase = "spago build"; - installPhase = "mkdir $out; cp -r * $out"; - }; - - package-lock = - (prev.slimlock.buildPackageLock { - src = fileset.toSource { - root = ./.; - fileset = npmFileset; - }; - omit = [ - "dev" - "peer" - ]; - }) - # better-sqlite3 relies on node-gyp and python3 in the build environment, so - # we add those to the native build inputs. - .overrideAttrs - ( - finalAttrs: prevAttrs: { - nativeBuildInputs = - ( - prevAttrs.nativeBuildInputs or [ ] - ++ [ - prev.python3 - prev.nodePackages.node-gyp - ] - ) - ++ (if prev.stdenv.isDarwin then [ prev.darwin.cctools ] else [ ]); - } - ); + registryLib = import ./nix/lib { lib = nixpkgs.lib; }; - # Produces a list of all PureScript binaries supported by purescript-overlay, - # ie. those from 0.13 onwards, callable using the naming convention - # `purs-MAJOR_MINOR_PATCH`. - # $ purs-0_14_0 --version - # 0.14.0 - # - # To add a new compiler to the list, just update the flake: - # $ nix flake update - supportedCompilers = prev.lib.filterAttrs ( - name: _: (builtins.match "^purs-[0-9]+_[0-9]+_[0-9]+$" name != null) - ) prev.purs-bin; + # Build sources with filesets + spagoSrc = fileset.toSource { + root = ./.; + fileset = pureScriptFileset; + }; - # An attrset containing all the PureScript binaries we want to make - # available. 
- compilers = prev.symlinkJoin { - name = "purs-compilers"; - paths = prev.lib.mapAttrsToList ( - name: drv: - prev.writeShellScriptBin name '' - exec ${drv}/bin/purs "$@" - '' - ) supportedCompilers; - }; + npmSrc = fileset.toSource { + root = ./.; + fileset = npmFileset; + }; - purs-versions = prev.writeShellScriptBin "purs-versions" '' - echo ${ - prev.lib.concatMapStringsSep " " ( - x: prev.lib.removePrefix "purs-" (builtins.replaceStrings [ "_" ] [ "." ] x) - ) (prev.lib.attrNames supportedCompilers) - } - ''; - in - { - apps = prev.callPackages ./app { - inherit - compilers - purs-versions - package-lock - spago-lock - ; - }; - scripts = prev.callPackages ./scripts { - inherit - compilers - purs-versions - package-lock - spago-lock - ; - }; - inherit - purs-versions - compilers - package-lock - spago-lock - ; - }; + # Overlays + overlays = import ./nix/overlays { + inherit + purescript-overlay + mkSpagoDerivation + registryLib + spagoSrc + npmSrc + ; }; in flake-utils.lib.eachSystem supportedSystems ( system: let pkgs = import nixpkgs { - inherit system; - overlays = [ - purescript-overlay.overlays.default - mkSpagoDerivation.overlays.default - slimlock.overlays.default - registryOverlay - ]; + inherit system overlays; }; - inherit (pkgs) lib; - - # We can't run 'spago test' in our flake checks because it tries to - # write to a cache and I can't figure out how to disable it. Instead - # we supply it as a shell script. - # - # Once we can run 'spago test --offline' or something similar, then this - # should just be a normal derivation that links the node_modules, copies - # the output dir locally, and runs 'spago test'. - # - # $ nix develop --command run-tests-script - run-tests-script = pkgs.writeShellScriptBin "run-tests-script" '' - set -euo pipefail - WORKDIR=$(mktemp -d) - cp spago.yaml spago.lock $WORKDIR - cp -a app foreign lib scripts test-utils types $WORKDIR - ln -s ${pkgs.registry.package-lock}/js/node_modules $WORKDIR/node_modules - - pushd $WORKDIR - export HEALTHCHECKS_URL=${defaultEnv.HEALTHCHECKS_URL} - ${pkgs.spago-bin.spago-0_93_44}/bin/spago test - - popd - ''; - - mkAppOutput = drv: { - type = "app"; - program = "${drv}/bin/${drv.name}"; - meta.description = drv.meta.description or "PureScript Registry ${drv.name}"; - }; - - # A full set of environment variables, each set to their default values - # according to the env.example file, or to the values explicitly set below - # (e.g. DHALL_PRELUDE and DHALL_TYPES). - defaultEnv = parseEnv ./.env.example // { + defaultEnv = registryLib.parseEnv ./.env.example // { inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; }; + in + { + packages = pkgs.registry.apps // pkgs.registry.scripts; - # Parse a .env file, skipping empty lines and comments, into Nix attrset - parseEnv = - path: - let - # Filter out lines only containing whitespace or comments - lines = pkgs.lib.splitString "\n" (builtins.readFile path); - noEmpties = builtins.filter (line: builtins.match "^[[:space:]]*$" line == null) lines; - noComments = builtins.filter (line: builtins.match "^#.*$" line == null) noEmpties; - toKeyPair = - line: - let - parts = pkgs.lib.splitString "=" line; - in - { - name = builtins.head parts; - value = pkgs.lib.concatStrings (builtins.tail parts); - }; - in - builtins.listToAttrs (builtins.map toKeyPair noComments); - - # Allows you to run a local VM with the registry server, mimicking the - # actual deployment. 
- run-vm = + apps = let - vm-machine = nixpkgs.lib.nixosSystem { - system = builtins.replaceStrings [ "darwin" ] [ "linux" ] system; - modules = [ - { - nixpkgs.overlays = [ - purescript-overlay.overlays.default - mkSpagoDerivation.overlays.default - slimlock.overlays.default - registryOverlay - ]; - } - ./nix/test-vm.nix - { - services.registry-server = { - enable = true; - host = "localhost"; - port = 8080; - enableCerts = false; - # Note: the default credentials are not valid, so you cannot - # actually publish packages, etc. without overriding the relevant - # env vars below. - envVars = defaultEnv; - }; - } - ]; + mkApp = name: drv: { + type = "app"; + program = "${drv}/bin/${drv.name}"; + meta = drv.meta or { }; }; in - pkgs.writeShellScript "run-vm.sh" '' - export NIX_DISK_IMAGE=$(mktemp -u -t nixos.qcow2.XXXXXXX) - trap "rm -f $NIX_DISK_IMAGE" EXIT - ${vm-machine.config.system.build.vm}/bin/run-registry-vm - ''; - in - rec { - packages = pkgs.registry.apps // pkgs.registry.scripts; - - apps = pkgs.lib.mapAttrs (_: drv: mkAppOutput drv) packages // { - default = { - type = "app"; - program = "${run-vm}"; - meta.description = "Run the registry server in a NixOS VM"; - }; - }; + pkgs.lib.mapAttrs mkApp (pkgs.registry.apps // pkgs.registry.scripts); checks = { + spago-test = + pkgs.runCommand "spago-test" + { + nativeBuildInputs = + with pkgs; + [ + nodejs + purs + ] + ++ registry-runtime-deps; + HEALTHCHECKS_URL = defaultEnv.HEALTHCHECKS_URL or ""; + } + '' + cp -r ${pkgs.registry-spago-lock} src && chmod -R +w src && cd src + ln -s ${pkgs.registry-package-lock}/node_modules . + node -e "import('./output/Test.Registry.Main/index.js').then(m => m.main())" + echo "Tests passed!" > $out + ''; + nix-format = pkgs.runCommand "nix-format" { src = fileset.toSource { root = ./.; - fileset = fileset.fileFilter (file: file.hasExt "nix") ./.; + fileset = fileset.fileFilter (f: f.hasExt "nix") ./.; }; - buildInputs = with pkgs; [ nixfmt-rfc-style ]; + nativeBuildInputs = [ pkgs.nixfmt-rfc-style ]; } '' - set -euo pipefail - nixfmt --check $(find $src -type f) | tee $out + nixfmt --check $(find $src -type f) && touch $out ''; purescript-format = - pkgs.runCommand "purescript-format-check" + pkgs.runCommand "purescript-format" { - src = fileset.toSource { - root = ./.; - fileset = pureScriptFileset; - }; - buildInputs = with pkgs; [ purs-tidy ]; + src = spagoSrc; + nativeBuildInputs = [ pkgs.purs-tidy ]; } '' - set -euo pipefail - purs-tidy check $src | tee $out + purs-tidy check $src && touch $out ''; - # This script verifies that - # - all the dhall we have in the repo actually compiles - # - all the example manifests actually typecheck as Manifests verify-dhall = pkgs.runCommand "verify-dhall" { @@ -404,490 +159,58 @@ ./lib/fixtures/manifests ]; }; - env = { - inherit DHALL_PRELUDE; - }; - buildInputs = with pkgs; [ + nativeBuildInputs = with pkgs; [ dhall dhall-json parallel ]; + inherit DHALL_PRELUDE; } '' - set -euo pipefail - - mkdir -p cache/dhall - export XDG_CACHE_HOME="$PWD/cache" - - find $src/types/v1 -iname "*.dhall" \ - | parallel ${ - lib.strings.escapeShellArgs [ - "--will-cite" - '' - echo "Typechecking {}" - dhall <<< {} | tee $out - '' - ] - } - - find $src/lib/fixtures/manifests -iname "*.json" \ - | parallel ${ - lib.strings.escapeShellArgs [ - "--will-cite" - '' - echo "Conforming {} to the Manifest type" - json-to-dhall --plain --records-loose --unions-strict --file {} $src/types/v1/Manifest.dhall | tee --append $out - '' - ] - } + mkdir -p cache/dhall && export 
XDG_CACHE_HOME="$PWD/cache" + find $src/types/v1 -name "*.dhall" | parallel --will-cite 'dhall <<< {}' + find $src/lib/fixtures/manifests -name "*.json" | parallel --will-cite \ + 'json-to-dhall --plain --records-loose --unions-strict --file {} $src/types/v1/Manifest.dhall' + touch $out ''; - # This is an integration test that will run the server and allow us to - # test it by sending API requests. You can run only this check with: - # nix build .#checks.${your-system}.integration - integration = - if pkgs.stdenv.isDarwin then - pkgs.runCommand "integration-disabled" { } '' - mkdir $out - echo "Integration tests are not supported on macOS systems, skipping..." - exit 0 - '' - else - let - serverPort = 8080; - githubPort = 9001; - bucketPort = 9002; - s3Port = 9003; - pursuitPort = 9004; - stateDir = "/var/lib/registry-server"; - envVars = defaultEnv // { - # We override all remote APIs with their local wiremock ports - GITHUB_API_URL = "http://localhost:${toString githubPort}"; - S3_API_URL = "http://localhost:${toString s3Port}"; - S3_BUCKET_URL = "http://localhost:${toString bucketPort}"; - PURSUIT_API_URL = "http://localhost:${toString pursuitPort}"; - - # We add an extra env var for the mock git applicaiton to know - # where the fixtures are. - REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; - }; - in - pkgs.nixosTest { - name = "server integration test"; - nodes = { - registry = { - imports = [ - (import ./nix/wiremock.nix { service = "github-api"; }) - (import ./nix/wiremock.nix { service = "s3-api"; }) - (import ./nix/wiremock.nix { service = "bucket-api"; }) - (import ./nix/wiremock.nix { service = "pursuit-api"; }) - ./nix/module.nix - ]; - config = { - nixpkgs.overlays = [ - # We need to ensure that the server is using the mock git - # binary instead of the real one. We do not, however, want - # to override 'git' in nixpkgs because that would make us - # rebuild everything that depends on git. 
- (_: prev: { registry.apps.server = prev.registry.apps.server.override { git = prev.gitMock; }; }) - ]; - - virtualisation.graphics = false; - - services.registry-server = { - enable = true; - host = "localhost"; - port = serverPort; - enableCerts = false; - stateDir = stateDir; - envVars = envVars; - }; - - services.wiremock-github-api = { - enable = true; - port = githubPort; - mappings = [ - { - request = { - method = "GET"; - url = "/repos/purescript/purescript-effect/contents/bower.json?ref=v4.0.0"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - type = "file"; - encoding = "base64"; - content = '' - ewogICJuYW1lIjogInB1cmVzY3JpcHQtZWZmZWN0IiwKICAiaG9tZXBhZ2Ui - OiAiaHR0cHM6Ly9naXRodWIuY29tL3B1cmVzY3JpcHQvcHVyZXNjcmlwdC1l - ZmZlY3QiLAogICJsaWNlbnNlIjogIkJTRC0zLUNsYXVzZSIsCiAgInJlcG9z - aXRvcnkiOiB7CiAgICAidHlwZSI6ICJnaXQiLAogICAgInVybCI6ICJodHRw - czovL2dpdGh1Yi5jb20vcHVyZXNjcmlwdC9wdXJlc2NyaXB0LWVmZmVjdC5n - aXQiCiAgfSwKICAiaWdub3JlIjogWwogICAgIioqLy4qIiwKICAgICJib3dl - cl9jb21wb25lbnRzIiwKICAgICJub2RlX21vZHVsZXMiLAogICAgIm91dHB1 - dCIsCiAgICAidGVzdCIsCiAgICAiYm93ZXIuanNvbiIsCiAgICAicGFja2Fn - ZS5qc29uIgogIF0sCiAgImRlcGVuZGVuY2llcyI6IHsKICAgICJwdXJlc2Ny - aXB0LXByZWx1ZGUiOiAiXjYuMC4wIgogIH0KfQo= - ''; - }; - }; - } - { - request = { - method = "GET"; - url = "/repos/purescript/purescript-effect/contents/LICENSE?ref=v4.0.0"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - type = "file"; - encoding = "base64"; - content = '' - Q29weXJpZ2h0IDIwMTggUHVyZVNjcmlwdAoKUmVkaXN0cmlidXRpb24gYW5k - IHVzZSBpbiBzb3VyY2UgYW5kIGJpbmFyeSBmb3Jtcywgd2l0aCBvciB3aXRo - b3V0IG1vZGlmaWNhdGlvbiwKYXJlIHBlcm1pdHRlZCBwcm92aWRlZCB0aGF0 - IHRoZSBmb2xsb3dpbmcgY29uZGl0aW9ucyBhcmUgbWV0OgoKMS4gUmVkaXN0 - cmlidXRpb25zIG9mIHNvdXJjZSBjb2RlIG11c3QgcmV0YWluIHRoZSBhYm92 - ZSBjb3B5cmlnaHQgbm90aWNlLCB0aGlzCmxpc3Qgb2YgY29uZGl0aW9ucyBh - bmQgdGhlIGZvbGxvd2luZyBkaXNjbGFpbWVyLgoKMi4gUmVkaXN0cmlidXRp - b25zIGluIGJpbmFyeSBmb3JtIG11c3QgcmVwcm9kdWNlIHRoZSBhYm92ZSBj - b3B5cmlnaHQgbm90aWNlLAp0aGlzIGxpc3Qgb2YgY29uZGl0aW9ucyBhbmQg - dGhlIGZvbGxvd2luZyBkaXNjbGFpbWVyIGluIHRoZSBkb2N1bWVudGF0aW9u - IGFuZC9vcgpvdGhlciBtYXRlcmlhbHMgcHJvdmlkZWQgd2l0aCB0aGUgZGlz - dHJpYnV0aW9uLgoKMy4gTmVpdGhlciB0aGUgbmFtZSBvZiB0aGUgY29weXJp - Z2h0IGhvbGRlciBub3IgdGhlIG5hbWVzIG9mIGl0cyBjb250cmlidXRvcnMK - bWF5IGJlIHVzZWQgdG8gZW5kb3JzZSBvciBwcm9tb3RlIHByb2R1Y3RzIGRl - cml2ZWQgZnJvbSB0aGlzIHNvZnR3YXJlIHdpdGhvdXQKc3BlY2lmaWMgcHJp - b3Igd3JpdHRlbiBwZXJtaXNzaW9uLgoKVEhJUyBTT0ZUV0FSRSBJUyBQUk9W - SURFRCBCWSBUSEUgQ09QWVJJR0hUIEhPTERFUlMgQU5EIENPTlRSSUJVVE9S - UyAiQVMgSVMiIEFORApBTlkgRVhQUkVTUyBPUiBJTVBMSUVEIFdBUlJBTlRJ - RVMsIElOQ0xVRElORywgQlVUIE5PVCBMSU1JVEVEIFRPLCBUSEUgSU1QTElF - RApXQVJSQU5USUVTIE9GIE1FUkNIQU5UQUJJTElUWSBBTkQgRklUTkVTUyBG - T1IgQSBQQVJUSUNVTEFSIFBVUlBPU0UgQVJFCkRJU0NMQUlNRUQuIElOIE5P - IEVWRU5UIFNIQUxMIFRIRSBDT1BZUklHSFQgSE9MREVSIE9SIENPTlRSSUJV - VE9SUyBCRSBMSUFCTEUgRk9SCkFOWSBESVJFQ1QsIElORElSRUNULCBJTkNJ - REVOVEFMLCBTUEVDSUFMLCBFWEVNUExBUlksIE9SIENPTlNFUVVFTlRJQUwg - REFNQUdFUwooSU5DTFVESU5HLCBCVVQgTk9UIExJTUlURUQgVE8sIFBST0NV - UkVNRU5UIE9GIFNVQlNUSVRVVEUgR09PRFMgT1IgU0VSVklDRVM7CkxPU1Mg - T0YgVVNFLCBEQVRBLCBPUiBQUk9GSVRTOyBPUiBCVVNJTkVTUyBJTlRFUlJV - UFRJT04pIEhPV0VWRVIgQ0FVU0VEIEFORCBPTgpBTlkgVEhFT1JZIE9GIExJ - QUJJTElUWSwgV0hFVEhFUiBJTiBDT05UUkFDVCwgU1RSSUNUIExJQUJJTElU - WSwgT1IgVE9SVAooSU5DTFVESU5HIE5FR0xJR0VOQ0UgT1IgT1RIRVJXSVNF - KSBBUklTSU5HIElOIEFOWSBXQVkgT1VUIE9GIFRIRSBVU0UgT0YgVEhJUwpT - 
T0ZUV0FSRSwgRVZFTiBJRiBBRFZJU0VEIE9GIFRIRSBQT1NTSUJJTElUWSBP - RiBTVUNIIERBTUFHRS4K - ''; - }; - }; - } - { - request = { - method = "GET"; - url = "/repos/purescript/package-sets/tags"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - name = "psc-0.15.10-20230105"; - commit = { - sha = "090897c992b2b310b1456506308db789672adac1"; - url = "https://api.github.com/repos/purescript/package-sets/commits/090897c992b2b310b1456506308db789672adac1"; - }; - }; - }; - } - ]; - }; - - services.wiremock-s3-api = { - enable = true; - port = s3Port; - files = [ - { - name = "prelude-6.0.1.tar.gz"; - path = ./app/fixtures/registry-storage/prelude-6.0.1.tar.gz; - } - ]; - mappings = [ - { - request = { - method = "GET"; - url = "/prelude/6.0.1.tar.gz"; - }; - response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "prelude-6.0.1.tar.gz"; - }; - } - ]; - }; - - services.wiremock-bucket-api = { - enable = true; - port = bucketPort; - mappings = [ - { - request = { - method = "GET"; - }; - response = { - status = 200; - body = ''prelude/6.0.1.tar.gz16298"abc123"''; - }; - } - # We don't expect that effect-4.0.0 has been uploaded. - { - request = { - method = "PUT"; - url = "/effect/4.0.0.tar.gz?x-id=PutObject"; - }; - response = { - status = 200; - body = ''"abc123"''; - }; - } - # But we do expect that prelude has been uploaded and - # can't be uploaded again. - { - request = { - method = "PUT"; - url = "/prelude/6.0.1.tar.gz?x-id=PutObject"; - }; - response = { - status = 500; - }; - } - ]; - }; - - services.wiremock-pursuit-api = { - enable = true; - port = pursuitPort; - mappings = [ - # Already-published packages, ie. the registry-storage - # tarballs. - { - request = { - method = "GET"; - url = "/packages/purescript-prelude/available-versions"; - }; - response = { - status = 200; - body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; - }; - } - # The result of publishing a package, which we hardcode - # to 201 (success) for now. - { - request = { - method = "POST"; - url = "/packages"; - }; - response = { - status = 201; - }; - } - ]; - }; - }; - }; - client = { - config = { - virtualisation.graphics = false; - }; - }; - }; - - # Test scripts are written in Python: - # https://nixos.org/manual/nixos/stable/index.html#sec-nixos-tests - # - # Note that the python file will be linted, and the test will fail if - # the script fails the lint — if you see an unexpected failure, check - # the nix log for errors. 
- testScript = - let - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - - mkdir -p ${envVars.REPO_FIXTURES_DIR}/purescript - - git config --global user.email "pacchettibotti@purescript.org" - git config --global user.name "pacchettibotti" - git config --global init.defaultBranch "master" - - # First the registry-index repo - cp -r ${./app/fixtures/registry-index} ${envVars.REPO_FIXTURES_DIR}/purescript/registry-index - - # Then the registry repo - cp -r ${./app/fixtures/registry} ${envVars.REPO_FIXTURES_DIR}/purescript/registry - - # Finally, the legacy package-sets repo - cp -r ${./app/fixtures/package-sets} ${envVars.REPO_FIXTURES_DIR}/purescript/package-sets - - # Next, we set up arbitrary Git repos that should be available - cp -r ${./app/fixtures/github-packages/effect-4.0.0} ${envVars.REPO_FIXTURES_DIR}/purescript/purescript-effect - - # Then we initialize the repos - for REPO in ${envVars.REPO_FIXTURES_DIR}/purescript/*/ - do - pushd $REPO - echo "Initializing $REPO" - git init - git add . - git commit -m "Fixture commit" - # Necessary so you can push to the upstream on the same branch - # as you are currently on. Wrecks the tree for the upstream, - # but this is acceptable for testing. - git config receive.denyCurrentBranch ignore - popd - done - - # Then we fixup the repos that need tags - pushd ${envVars.REPO_FIXTURES_DIR}/purescript/package-sets - git tag -m "psc-0.15.4-20230105" psc-0.15.4-20230105 - popd - - pushd ${envVars.REPO_FIXTURES_DIR}/purescript/purescript-effect - git tag -m "v4.0.0" v4.0.0 - popd - ''; - - publish_effect = pkgs.writeText "publish-effect-4.0.0.json" '' - { - "name": "effect", - "ref": "v4.0.0", - "compiler": "0.15.4", - "location": { - "githubOwner": "purescript", - "githubRepo": "purescript-effect" - } - } - ''; - in - '' - import json - import time - - ########## - # - # SETUP - # - ########## - - # We set up the git fixtures - registry.start() - print(registry.succeed("${setupGitFixtures}/bin/setup-git-fixtures")) - - # We wait for the server to start up and for the client to be able to reach it. - registry.wait_for_unit("wiremock-github-api.service") - registry.wait_for_unit("wiremock-s3-api.service") - registry.wait_for_unit("wiremock-bucket-api.service") - registry.wait_for_unit("wiremock-pursuit-api.service") - registry.wait_for_unit("server.service") - - # Give time for all the various services to come up... - client.start() - client.wait_until_succeeds("${pkgs.curl}/bin/curl --fail-with-body http://registry/api/v1/jobs", timeout=20) - - ########## - # - # TESTS - # - ########## - - # First we initiate the call to publish - print("POST /publish") - publish_result = json.loads(client.succeed("${pkgs.curl}/bin/curl -L -X POST -d '@${publish_effect}' http://registry/api/v1/publish --header 'Content-Type:application/json'")) - print(publish_result) - job_id = publish_result['jobId'] - assert len(job_id) == 36, f"POST /publish should return a 36-char job id, but returned {publish_result}" - - # Then we poll for job results, expecting an eventual 'success'. 
- try_count = 0 - delay_seconds = 3 - prev_timestamp = "2023-07-29T00:00:00.000Z" - log_level = "DEBUG" - while True: - print(f"Requesting job information for job {job_id}") - poll_result = json.loads(client.succeed(f"${pkgs.curl}/bin/curl -L http://registry/api/v1/jobs/{job_id}?since={prev_timestamp}&level={log_level}")) - print(poll_result) - if "finishedAt" in poll_result: - print("Job has completed!") - success = poll_result['success'] - assert success, f"GET /jobs/{job_id} should return success, but it returned {poll_result}" - break - elif (try_count * delay_seconds) > 60: - raise ValueError(f"Cancelling publish request after {try_count * delay_seconds} seconds, this is too long...") - else: - print(f"Job is still ongoing, retrying in {delay_seconds} seconds...") - time.sleep(delay_seconds) - try_count = try_count + 1 - ''; - }; + integration = pkgs.callPackage ./nix/test/integration.nix { + inherit overlays; + rootPath = ./.; + }; }; - devShells = { - default = pkgs.mkShell { - inherit GIT_LFS_SKIP_SMUDGE; - - name = "registry-dev"; - packages = with pkgs; [ - # All stable PureScript compilers - registry.compilers - registry.purs-versions - - # TODO: Hacky, remove when I can run spago test in a pure env - run-tests-script - - # Deployment + devShells.default = pkgs.mkShell { + name = "registry-dev"; + inherit GIT_LFS_SKIP_SMUDGE; + packages = + with pkgs; + registry-runtime-deps + ++ [ + # Development-specific tools colmena - - # Project tooling - nixFlakes nixfmt-rfc-style - git - git-lfs bash nodejs jq - licensee - coreutils - gzip - gnutar - dhall - dhall-json dbmate - - # Development tooling purs - spago-bin.spago-0_93_44 + spago purs-tidy-unstable purs-backend-es-unstable ]; - }; }; } ) - # Separated because this is not supported for all systems. // { - # Deployment specification for the registry server colmena = { meta = { nixpkgs = import nixpkgs { system = "x86_64-linux"; - overlays = [ - purescript-overlay.overlays.default - mkSpagoDerivation.overlays.default - slimlock.overlays.default - registryOverlay - ]; + inherit overlays; }; }; - # The registry server + registry = { lib, modulesPath, ... }: let @@ -897,27 +220,21 @@ deployment.targetHost = host; deployment.buildOnTarget = true; - # We import the server module and also the digital ocean configuration - # necessary to run in a DO droplet. imports = lib.optional (builtins.pathExists ./do-userdata.nix) ./do-userdata.nix ++ [ (modulesPath + "/virtualisation/digital-ocean-config.nix") - ./nix/module.nix - # Extra config for the deployed server only. + ./nix/registry-server.nix { - # Enable Digital Ocean monitoring services.do-agent.enable = true; - - # Enable the registry server - services.registry-server.enable = true; - services.registry-server.host = host; - services.registry-server.envVars = { - # These env vars are known to Nix so we set them in advance. - # Others, like credentials, must be set in a .env file in - # the state directory, unless there are viable defaults. - inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + services.registry-server = { + enable = true; + host = host; + envVars = { + # These env vars are known to Nix so we set them in advance. + # Others, like credentials, must be set in a .env file in + # the state directory, unless there are viable defaults. + inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + }; }; - - # Don't change this. 
system.stateVersion = "24.05"; } ]; diff --git a/nix/lib/buildRegistryPackage.nix b/nix/lib/buildRegistryPackage.nix new file mode 100644 index 000000000..af32221e8 --- /dev/null +++ b/nix/lib/buildRegistryPackage.nix @@ -0,0 +1,83 @@ +# Helper function for building registry PureScript executables. Compiles a +# PureScript module to an esbuild-bundled Node.js executable. +# +# Returns a function suitable for callPackage that will be auto-injected with: +# - registry-runtime-deps, registry-package-lock (from overlay) +# - Standard build tools (esbuild, nodejs, etc.) +{ + name, + module, + src, + spagoLock, + description, + extraInstall ? "", +}: +{ + lib, + stdenv, + makeWrapper, + esbuild, + writeText, + nodejs, + registry-runtime-deps, + registry-package-lock, +}: +let + # ESM entrypoint that imports and runs the PureScript main function + entrypoint = writeText "entrypoint.js" '' + import { main } from "./output/${module}"; + main(); + ''; +in +stdenv.mkDerivation { + inherit name src; + + nativeBuildInputs = [ + esbuild + makeWrapper + ]; + + buildInputs = [ nodejs ]; + + meta = { + inherit description; + mainProgram = name; + }; + + buildPhase = '' + runHook preBuild + + # Link dependencies and compiled output + ln -s ${registry-package-lock}/node_modules . + cp -r ${spagoLock}/output . + + # Bundle with esbuild + cp ${entrypoint} entrypoint.js + esbuild entrypoint.js \ + --bundle \ + --outfile=${name}.js \ + --platform=node \ + --packages=external + + runHook postBuild + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + + # Install the bundled JavaScript + cp ${name}.js $out/${name}.js + + # Create wrapper script with runtime dependencies in PATH + makeWrapper ${nodejs}/bin/node $out/bin/${name} \ + --add-flags "$out/${name}.js" \ + --set NODE_PATH "${registry-package-lock}/node_modules" \ + --prefix PATH : "${lib.makeBinPath registry-runtime-deps}" + + ${extraInstall} + + runHook postInstall + ''; +} diff --git a/nix/lib/default.nix b/nix/lib/default.nix new file mode 100644 index 000000000..799ebd8de --- /dev/null +++ b/nix/lib/default.nix @@ -0,0 +1,5 @@ +{ lib }: +{ + parseEnv = import ./parseEnv.nix { inherit lib; }; + buildRegistryPackage = import ./buildRegistryPackage.nix; +} diff --git a/nix/lib/parseEnv.nix b/nix/lib/parseEnv.nix new file mode 100644 index 000000000..9590ecf4f --- /dev/null +++ b/nix/lib/parseEnv.nix @@ -0,0 +1,19 @@ +# Parse a .env file into a Nix attrset, skipping comments and empty lines +{ lib }: + +path: +let + lines = lib.splitString "\n" (builtins.readFile path); + isContent = + line: builtins.match "^[[:space:]]*$" line == null && builtins.match "^#.*$" line == null; + toKeyValue = + line: + let + match = builtins.match "([^=]+)=(.*)" line; + in + { + name = builtins.elemAt match 0; + value = builtins.elemAt match 1; + }; +in +builtins.listToAttrs (map toKeyValue (builtins.filter isContent lines)) diff --git a/nix/overlays/default.nix b/nix/overlays/default.nix new file mode 100644 index 000000000..499e8f3fd --- /dev/null +++ b/nix/overlays/default.nix @@ -0,0 +1,13 @@ +# Registry overlays +{ + purescript-overlay, + mkSpagoDerivation, + registryLib, + spagoSrc, + npmSrc, +}: +[ + purescript-overlay.overlays.default + mkSpagoDerivation.overlays.default + (import ./registry.nix { inherit registryLib spagoSrc npmSrc; }) +] diff --git a/nix/overlays/registry.nix b/nix/overlays/registry.nix new file mode 100644 index 000000000..f5e769303 --- /dev/null +++ b/nix/overlays/registry.nix @@ -0,0 +1,211 @@ +# Registry packages overlay +# +# 
This overlay provides all the registry server components, tools, and scripts. +# +# Architecture: +# - Apps (in ./app): Server and GitHub importer that share compiled dependencies +# - Scripts (in ./scripts): CLI utilities that depend on the app code +# - Build optimization: Apps share a pre-compiled output (app) since they +# use the same dependencies and source. Scripts also use this to avoid recompiling. +{ + registryLib, + spagoSrc, + npmSrc, +}: +final: prev: +let + # Shared compiled output for all apps. Both registry-server and registry-github-importer + # are built from ./app with the same dependencies, so we compile once and reuse. + # Scripts in ./scripts depend on registry-app, so they also benefit from this cache. + app = prev.stdenv.mkDerivation { + name = "registry-app-shared"; + src = ../../app; + nativeBuildInputs = [ prev.purs-backend-es-unstable ]; + + phases = [ + "buildPhase" + "installPhase" + ]; + + buildPhase = '' + # Link dependencies + ln -s ${final.registry-package-lock}/node_modules . + ln -s ${final.registry-spago-lock}/output . + + # Compile PureScript to JavaScript using purs-backend-es + purs-backend-es build + ''; + + installPhase = '' + mkdir $out + cp -r output-es $out/output + + # purs-backend-es doesn't copy foreign files, so we need to manually include them + for dir in output/*/; do + subdir=$(basename "$dir") + if [ -f "output/$subdir/foreign.js" ]; then + cp "output/$subdir/foreign.js" "$out/output/$subdir/" + fi + done + ''; + }; + + # Map of script name -> { module, description } + scripts = { + legacy-importer = { + module = "Registry.Scripts.LegacyImporter"; + description = "Import packages from legacy registries (bower, psc-package, etc.)"; + }; + package-deleter = { + module = "Registry.Scripts.PackageDeleter"; + description = "Delete packages from the registry"; + }; + package-set-updater = { + module = "Registry.Scripts.PackageSetUpdater"; + description = "Update package sets"; + }; + package-transferrer = { + module = "Registry.Scripts.PackageTransferrer"; + description = "Transfer packages between storage backends"; + }; + solver = { + module = "Registry.Scripts.Solver"; + description = "Run dependency solver against registry manifests"; + }; + verify-integrity = { + module = "Registry.Scripts.VerifyIntegrity"; + description = "Verify registry and registry-index consistency"; + }; + compiler-versions = { + module = "Registry.Scripts.CompilerVersions"; + description = "List supported compiler versions"; + }; + }; +in +{ + # Use Node.js 20 LTS for all registry components + nodejs = prev.nodejs_20; + + # Pin spago to the version we use + spago = prev.spago-bin.spago-0_93_44; + + # Spago lock: compiled PureScript dependencies for the entire workspace + registry-spago-lock = prev.mkSpagoDerivation { + name = "registry"; + src = spagoSrc; + nativeBuildInputs = [ + final.spago + prev.purescript + ]; + buildPhase = "spago build"; + installPhase = "mkdir $out; cp -r * $out"; + }; + + # NPM lock: JavaScript dependencies (esbuild, node-gyp, etc.) 
+ registry-package-lock = prev.buildNpmPackage { + pname = "purescript-registry"; + version = "0.0.1"; + src = npmSrc; + dontNpmBuild = true; + + nativeBuildInputs = + with prev; + [ + python3 + nodePackages.node-gyp + ] + ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; + + # To update: run `nix build .#server` and copy the hash from the error + npmDepsHash = "sha256-vm6k4DUDWUgPcPeym3YhA1hIg1LbHCDRBSH+7Zs52Uw="; + + installPhase = '' + mkdir -p $out + rm -f node_modules/{registry-app,registry-lib,registry-foreign} + mv node_modules $out/ + ''; + }; + + # Compiler infrastructure + + # All PureScript compilers we support (filtered from purs-bin overlay) + registry-supported-compilers = prev.lib.filterAttrs ( + name: _: builtins.match "^purs-[0-9]+_[0-9]+_[0-9]+$" name != null + ) prev.purs-bin; + + # Executable directory containing all supported compiler versions + registry-compilers = prev.symlinkJoin { + name = "purs-compilers"; + paths = prev.lib.mapAttrsToList ( + name: drv: prev.writeShellScriptBin name ''exec ${drv}/bin/purs "$@"'' + ) final.registry-supported-compilers; + }; + + # Script that prints all supported compiler versions (space-separated) + registry-purs-versions = prev.writeShellScriptBin "purs-versions" '' + echo ${ + prev.lib.concatMapStringsSep " " ( + x: prev.lib.removePrefix "purs-" (builtins.replaceStrings [ "_" ] [ "." ] x) + ) (prev.lib.attrNames final.registry-supported-compilers) + } + ''; + + # Runtime dependencies needed by all registry executables + # Used in: buildRegistryPackage, spago-test check, and devShell + registry-runtime-deps = with prev; [ + final.registry-compilers + final.registry-purs-versions + + dhall + dhall-json + licensee + git + git-lfs + coreutils + gzip + gnutar + ]; + + # Applications + + registry-server = prev.callPackage (registryLib.buildRegistryPackage { + name = "registry-server"; + module = "Registry.App.Server"; + description = "PureScript Registry API server"; + src = ../../app; + spagoLock = app; + extraInstall = "cp -r ${../../db} $out/bin/db"; + }) { }; + + registry-github-importer = prev.callPackage (registryLib.buildRegistryPackage { + name = "registry-github-importer"; + module = "Registry.App.GitHubIssue"; + description = "Import packages from GitHub issues"; + src = ../../app; + spagoLock = app; + }) { }; + + # Scripts - generated from the scripts attrset with module and description +} +// prev.lib.mapAttrs' ( + name: info: + prev.lib.nameValuePair "registry-${name}" ( + prev.callPackage (registryLib.buildRegistryPackage { + name = "registry-${name}"; + module = info.module; + description = info.description; + src = ../../scripts/src; + spagoLock = final.registry-spago-lock; + }) { } + ) +) scripts +// { + # Convenience namespace for bulk access to apps and scripts + registry = { + apps = { + server = final.registry-server; + github-importer = final.registry-github-importer; + }; + scripts = prev.lib.mapAttrs (name: _: final."registry-${name}") scripts; + }; +} diff --git a/nix/module.nix b/nix/registry-server.nix similarity index 54% rename from nix/module.nix rename to nix/registry-server.nix index 70ca42e28..92f301832 100644 --- a/nix/module.nix +++ b/nix/registry-server.nix @@ -6,6 +6,28 @@ }: let cfg = config.services.registry-server; + + # Convert env vars attrset to .env file format + envFile = pkgs.writeText ".env" ( + lib.concatStringsSep "\n" (lib.mapAttrsToList (k: v: "${k}=${toString v}") cfg.envVars) + ); + + serverInit = pkgs.writeShellScriptBin "registry-server-init" '' + mkdir -p 
${cfg.stateDir}/db + + set -o allexport + source ${envFile} + [ -f ${cfg.stateDir}/.env ] && source ${cfg.stateDir}/.env + set +o allexport + + export DATABASE_URL="sqlite:${cfg.stateDir}/db/registry.sqlite3" + + cd ${pkgs.registry-server}/bin + ${pkgs.dbmate}/bin/dbmate up + + cd ${cfg.stateDir} + exec ${pkgs.registry-server}/bin/registry-server + ''; in { options.services.registry-server = { @@ -26,13 +48,13 @@ in stateDir = lib.mkOption { type = lib.types.str; default = "/var/lib/registry-server"; - description = "The directory to store the registry server state (database, etc.)"; + description = "The directory to store the registry server state"; }; enableCerts = lib.mkOption { type = lib.types.bool; default = true; - description = "Whether to enable Let's Encrypt certificates for the registry server"; + description = "Whether to enable Let's Encrypt certificates"; }; envVars = lib.mkOption { @@ -40,23 +62,20 @@ in lib.types.either lib.types.str (lib.types.either lib.types.int lib.types.path) ); default = { }; - description = "Environment variables to set for the registry server"; + description = "Environment variables for the registry server"; }; }; config = lib.mkIf cfg.enable { - environment = { - systemPackages = [ - pkgs.vim - pkgs.git - ]; - }; + environment.systemPackages = [ + pkgs.vim + pkgs.git + ]; nix = { gc.automatic = true; settings = { auto-optimise-store = true; - # https://garnix.io/docs/caching substituters = [ "https://cache.garnix.io" ]; trusted-public-keys = [ "cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g=" ]; }; @@ -73,75 +92,37 @@ in users = { mutableUsers = false; - users = let deployers = import ./deployers.nix; in - pkgs.lib.mapAttrs (user: attrs: { + lib.mapAttrs (user: attrs: { isNormalUser = true; home = "/home/${user}"; extraGroups = [ "wheel" ]; - packages = [ - pkgs.rsync - pkgs.git - pkgs.curl - pkgs.coreutils - pkgs.vim + packages = with pkgs; [ + rsync + git + curl + coreutils + vim ]; openssh.authorizedKeys.keys = attrs.sshKeys; }) deployers; }; - systemd.services = - let - # Print an attrset of env vars { ENV_VAR = "value"; } as a newline-delimited - # string of "ENV_VAR=value" lines, then write the text to the Nix store. - printEnv = - vars: - pkgs.lib.concatStringsSep "\n" ( - pkgs.lib.mapAttrsToList ( - name: value: - if (builtins.typeOf value == "int") then "${name}=${toString value}" else "${name}=${value}" - ) vars - ); - defaultEnvFile = pkgs.writeText ".env" (printEnv cfg.envVars); - in - { - server = { - description = "registry server"; - wantedBy = [ - "multi-user.target" - "nginx.service" - ]; - serviceConfig = { - ExecStart = "${pkgs.writeShellScriptBin "registry-server-init" '' - # Ensure the state directory is available and initialize the database - mkdir -p ${cfg.stateDir}/db - - # Initialize environment variables - set -o allexport - source ${defaultEnvFile} - - # If a .env file exists in the stateDir then we will use it instead; - # this overwrites the cfg.envVars settings. - if [ -f ${cfg.stateDir}/.env ]; then - echo "Production .env file found! Values will overwrite the defaults." - source ${cfg.stateDir}/.env - fi - set +o allexport - - export DATABASE_URL="sqlite:${cfg.stateDir}/db/registry.sqlite3" - pushd ${pkgs.registry.apps.server}/bin - ${pkgs.dbmate}/bin/dbmate up - popd - - echo "Starting registry server..." 
- ${pkgs.registry.apps.server}/bin/registry-server - ''}/bin/registry-server-init"; - }; - }; + systemd.services.server = { + description = "registry server"; + wantedBy = [ + "multi-user.target" + "nginx.service" + ]; + serviceConfig = { + ExecStart = "${serverInit}/bin/registry-server-init"; + Type = "simple"; + Restart = "always"; }; + }; swapDevices = [ { @@ -178,11 +159,7 @@ in PureScript Registry - +
[landing-page HTML garbled in extraction; recoverable text: "PureScript Registry"]
@@ -192,9 +169,7 @@ in ''; }; - locations."/api" = { - proxyPass = "http://127.0.0.1:${toString cfg.port}"; - }; + locations."/api".proxyPass = "http://127.0.0.1:${toString cfg.port}"; }; }; }; diff --git a/nix/test-vm.nix b/nix/test-vm.nix deleted file mode 100644 index 916866579..000000000 --- a/nix/test-vm.nix +++ /dev/null @@ -1,37 +0,0 @@ -# Machine configuration for the NixOS virtual machine suitable for testing. -{ - lib, - pkgs, - modulesPath, - ... -}: -{ - imports = [ - "${modulesPath}/virtualisation/qemu-vm.nix" - ./module.nix - ]; - - config = { - # https://github.com/utmapp/UTM/issues/2353 - networking.nameservers = lib.mkIf pkgs.stdenv.isDarwin [ "8.8.8.8" ]; - - # NOTE: Use 'shutdown now' to exit the VM. - services.getty.autologinUser = "root"; - - virtualisation = { - graphics = false; - host = { - inherit pkgs; - }; - forwardPorts = [ - { - from = "host"; - guest.port = 80; - host.port = 8080; - } - ]; - }; - - system.stateVersion = "23.11"; - }; -} diff --git a/nix/test/git-mock.mjs b/nix/test/git-mock.mjs new file mode 100644 index 000000000..df8068972 --- /dev/null +++ b/nix/test/git-mock.mjs @@ -0,0 +1,53 @@ +#!/usr/bin/env node + +/* + +Mock git binary for testing. Detects arguments to 'git' containing a URL +and replaces them with a local filepath. This is a drop-in replacement +for 'git' that should be used in offline / test environments when we only +want fixture data. + +*/ + +import { spawn } from "node:child_process"; + +const repoFixturesDir = process.env.REPO_FIXTURES_DIR; +if (!repoFixturesDir) { + throw new Error("REPO_FIXTURES_DIR is not set, but is required."); +} + +const gitBinary = process.env.GIT_BINARY; +if (!gitBinary) { + throw new Error("GIT_BINARY is not set, but is required."); +} + +// Replace any URL arguments with the local fixtures path. +function replaceIfUrl(arg) { + try { + const url = new URL(arg); + const path = url.pathname.replace(/\.git$/, ""); + const file = "file://" + repoFixturesDir + path; + console.log(file); + return file; + } catch (e) { + // Not a URL, ignore + } + return arg; +} + +const args = process.argv.slice(2); +const modified = args.map(replaceIfUrl); + +const git = spawn(gitBinary, modified); + +git.stdout.on("data", (data) => { + console.log(data.toString("utf8")); +}); + +git.stderr.on("data", (data) => { + console.error(data.toString("utf8")); +}); + +git.on("close", (code) => { + process.exit(code); +}); diff --git a/nix/test/integration.nix b/nix/test/integration.nix new file mode 100644 index 000000000..cc3309981 --- /dev/null +++ b/nix/test/integration.nix @@ -0,0 +1,394 @@ +# VM-based integration test for the registry server. This test deploys the actual service +# to a NixOS VM that matches our deploy environment, and then executes the core publishing +# workflow. The registry relies on several external services and tools that we don't +# control, so the APIs are mocked with WireMock and the Git commands are mocked with a +# wrapper CLI tool called `git-mock`. +# +# The integration test is set up such that the `prelude` package is already published to +# the registry, and the user is now publishing the `effect` package. This can be seen in +# the WireMock and Git fixture setup below. 
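+#
+# As a reader's sketch (endpoints and payload taken from the testScript below;
+# the job id is a server-assigned UUID), the flow the test drives is:
+#
+#   curl -X POST -d @publish-effect.json -H 'Content-Type: application/json' \
+#     http://registry/api/v1/publish
+#   # => { "jobId": "<uuid>" }
+#   curl 'http://registry/api/v1/jobs/<uuid>?since=2023-01-01T00:00:00Z&level=DEBUG'
+#   # polled until the response contains "finishedAt" with "success": true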
+{ + pkgs, + lib, + overlays, + rootPath, +}: + +if pkgs.stdenv.isDarwin then + pkgs.runCommand "integration-skip" { } '' + echo "Integration tests require Linux VMs, skipping on macOS" > $out + '' +else + let + # Port configuration - single source of truth + ports = { + server = 8080; + github = 9001; + bucket = 9002; + s3 = 9003; + pursuit = 9004; + }; + + stateDir = "/var/lib/registry-server"; + + # Git mock that redirects URLs to local fixtures; this is necessary because otherwise + # commands would reach out to GitHub or the other package origins. + gitMock = pkgs.writeShellScriptBin "git" '' + export GIT_BINARY="${pkgs.git}/bin/git" + exec ${pkgs.nodejs}/bin/node ${./git-mock.mjs} "$@" + ''; + + # WireMock NixOS module to make it easy to mock HTTP services the registry depends on. + wiremockModule = + { service }: + { + pkgs, + config, + lib, + ... + }: + let + cfg = config.services."wiremock-${service}"; + mappingsFormat = pkgs.formats.json { }; + rootDir = + let + mappingsJson = mappingsFormat.generate "mappings.json" { mappings = cfg.mappings; }; + in + pkgs.runCommand "wiremock-root" { } '' + mkdir -p $out/{mappings,__files} + cp ${mappingsJson} $out/mappings/mappings.json + ${lib.concatMapStrings (f: "cp ${f.path} $out/__files/${f.name}\n") cfg.files} + ''; + in + { + options.services."wiremock-${service}" = { + enable = lib.mkEnableOption "WireMock"; + port = lib.mkOption { + type = lib.types.int; + default = 8080; + }; + files = lib.mkOption { + type = lib.types.listOf ( + lib.types.submodule { + options = { + name = lib.mkOption { type = lib.types.str; }; + path = lib.mkOption { type = lib.types.path; }; + }; + } + ); + default = [ ]; + }; + mappings = lib.mkOption { + type = mappingsFormat.type; + default = [ ]; + }; + }; + + config = lib.mkIf cfg.enable { + systemd.services."wiremock-${service}" = { + description = "WireMock ${service}"; + wantedBy = [ "multi-user.target" ]; + serviceConfig = { + ExecStart = "${pkgs.wiremock}/bin/wiremock --port ${toString cfg.port} --root-dir ${rootDir} --disable-banner"; + Type = "simple"; + }; + }; + }; + }; + + parseEnv = import ../lib/parseEnv.nix { inherit lib; }; + envVars = parseEnv (rootPath + "/.env.example") // { + GITHUB_API_URL = "http://localhost:${toString ports.github}"; + S3_API_URL = "http://localhost:${toString ports.s3}"; + S3_BUCKET_URL = "http://localhost:${toString ports.bucket}"; + PURSUIT_API_URL = "http://localhost:${toString ports.pursuit}"; + REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; + }; + + setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' + set -e + mkdir -p ${stateDir}/repo-fixtures/purescript + git config --global user.email "pacchettibotti@purescript.org" + git config --global user.name "pacchettibotti" + git config --global init.defaultBranch "master" + + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} ${stateDir}/repo-fixtures/purescript/ + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 ${stateDir}/repo-fixtures/purescript/purescript-effect + + for repo in ${stateDir}/repo-fixtures/purescript/*/; do + cd "$repo" + git init && git add . 
&& git commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + done + + git -C ${stateDir}/repo-fixtures/purescript/package-sets tag -m "psc-0.15.4-20230105" psc-0.15.4-20230105 + git -C ${stateDir}/repo-fixtures/purescript/purescript-effect tag -m "v4.0.0" v4.0.0 + ''; + + publishPayload = pkgs.writeText "publish-effect.json" ( + builtins.toJSON { + name = "effect"; + ref = "v4.0.0"; + compiler = "0.15.4"; + location = { + githubOwner = "purescript"; + githubRepo = "purescript-effect"; + }; + } + ); + in + pkgs.testers.nixosTest { + name = "registry-integration"; + + testScript = '' + import json + import time + + # Start registry and set up git fixtures + registry.start() + registry.succeed("${setupGitFixtures}/bin/setup-git-fixtures") + + # Wait for all services to be ready + registry.wait_for_unit("wiremock-github-api.service") + registry.wait_for_unit("wiremock-s3-api.service") + registry.wait_for_unit("wiremock-bucket-api.service") + registry.wait_for_unit("wiremock-pursuit-api.service") + registry.wait_for_unit("server.service") + + # Start client and wait for API + client.start() + client.wait_until_succeeds( + "curl --fail-with-body http://registry/api/v1/jobs", + timeout=20 + ) + + # Publish a package + result = json.loads(client.succeed( + "curl -s -X POST -d @${publishPayload} -H 'Content-Type: application/json' " + "http://registry/api/v1/publish" + )) + + job_id = result["jobId"] + assert len(job_id) == 36, f"Expected job ID, got: {result}" + print(f"Job created: {job_id}") + + # Poll for completion + for attempt in range(20): + time.sleep(3) + poll = json.loads(client.succeed( + f"curl -s 'http://registry/api/v1/jobs/{job_id}" + "?since=2023-01-01T00:00:00Z&level=DEBUG'" + )) + + if "finishedAt" in poll: + assert poll["success"], f"Job failed: {poll}" + print("✓ Job completed successfully") + break + else: + raise Exception("Job did not complete in time") + ''; + + # This section defines the machine, configuring the Wiremock instances to + # mock external APIs, overriding Git with the mocked version, and setting + # up the actual Wiremock data to return. The machine is based on the + # same registry-server Nix module we deploy. + nodes.client.virtualisation.graphics = false; + nodes.registry = { + imports = [ + (wiremockModule { service = "github-api"; }) + (wiremockModule { service = "s3-api"; }) + (wiremockModule { service = "bucket-api"; }) + (wiremockModule { service = "pursuit-api"; }) + (rootPath + "/nix/registry-server.nix") + ]; + + # We replace Git in registry-runtime-deps with our custom mocked Git which + # prevents reaching out over the network. We override registry-runtime-deps + # to substitute the mock, which causes registry-server to be rebuilt with it. + nixpkgs.overlays = overlays ++ [ + (_: prev: { + registry-runtime-deps = map ( + pkg: if pkg == prev.git then gitMock else pkg + ) prev.registry-runtime-deps; + }) + ]; + + virtualisation.graphics = false; + + # Finally, we define the running services on the machine: the registry, + # and then the various wiremock servers. 
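+    #
+    # For reference, the registry reaches each mock through the env vars set
+    # in `envVars` above, one mock per entry in `ports`:
+    #
+    #   GITHUB_API_URL  -> http://localhost:9001  (wiremock-github-api)
+    #   S3_BUCKET_URL   -> http://localhost:9002  (wiremock-bucket-api)
+    #   S3_API_URL      -> http://localhost:9003  (wiremock-s3-api)
+    #   PURSUIT_API_URL -> http://localhost:9004  (wiremock-pursuit-api)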
+ services.registry-server = { + enable = true; + host = "localhost"; + port = ports.server; + enableCerts = false; + inherit stateDir envVars; + }; + + # GitHub API mock - returns base64-encoded content like the real API + services.wiremock-github-api = { + enable = true; + port = ports.github; + mappings = + let + # Helper to create GitHub contents API response, as it returns base64-encoded content + base64Response = + { + url, + fileName, + filePath, + }: + { + request = { + method = "GET"; + inherit url; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + type = "file"; + encoding = "base64"; + name = fileName; + path = fileName; + # Base64 encode the file content using Nix builtins + content = builtins.readFile ( + pkgs.runCommand "base64-${fileName}" { } '' + base64 -w 0 ${filePath} > $out + '' + ); + }; + }; + }; + + effectBase64Response = + fileName: + base64Response { + url = "/repos/purescript/purescript-effect/contents/${fileName}?ref=v4.0.0"; + fileName = fileName; + filePath = rootPath + "/app/fixtures/github-packages/effect-4.0.0/${fileName}"; + }; + in + [ + (effectBase64Response "bower.json") + (effectBase64Response "LICENSE") + + { + request = { + method = "GET"; + url = "/repos/purescript/package-sets/tags"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + name = "psc-0.15.10-20230105"; + commit = { + sha = "090897c992b2b310b1456506308db789672adac1"; + url = "https://api.github.com/repos/purescript/package-sets/commits/090897c992b2b310b1456506308db789672adac1"; + }; + }; + }; + } + ]; + }; + + # S3 API mock - serves package tarballs + services.wiremock-s3-api = { + enable = true; + port = ports.s3; + files = [ + { + name = "prelude-6.0.1.tar.gz"; + path = rootPath + "/app/fixtures/registry-storage/prelude-6.0.1.tar.gz"; + } + { + name = "type-equality-4.0.1.tar.gz"; + path = rootPath + "/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz"; + } + ]; + mappings = [ + { + request = { + method = "GET"; + url = "/prelude/6.0.1.tar.gz"; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = "prelude-6.0.1.tar.gz"; + }; + } + { + request = { + method = "GET"; + url = "/type-equality/4.0.1.tar.gz"; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = "type-equality-4.0.1.tar.gz"; + }; + } + ]; + }; + + # S3 Bucket API mock - handles upload/list operations + services.wiremock-bucket-api = { + enable = true; + port = ports.bucket; + mappings = [ + { + request.method = "GET"; + response = { + status = 200; + body = ''prelude/6.0.1.tar.gz16298"abc123"type-equality/4.0.1.tar.gz2184"def456"''; + }; + } + { + request = { + method = "PUT"; + url = "/effect/4.0.0.tar.gz?x-id=PutObject"; + }; + response = { + status = 200; + body = ''"abc123"''; + }; + } + { + request = { + method = "PUT"; + url = "/prelude/6.0.1.tar.gz?x-id=PutObject"; + }; + response.status = 500; + } + ]; + }; + + # Pursuit API mock - documentation hosting + services.wiremock-pursuit-api = { + enable = true; + port = ports.pursuit; + mappings = [ + { + request = { + method = "GET"; + url = "/packages/purescript-prelude/available-versions"; + }; + response = { + status = 200; + body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; + }; + } + { + request = { + method = "POST"; + url = "/packages"; + }; + response.status = 201; + } + ]; + }; + }; + + } diff --git 
a/nix/wiremock.nix b/nix/wiremock.nix deleted file mode 100644 index 3db525674..000000000 --- a/nix/wiremock.nix +++ /dev/null @@ -1,114 +0,0 @@ -{ service }: -{ - pkgs, - config, - lib, - ... -}: -with lib; -let - cfg = config.services."wiremock-${service}"; - mappingsFormat = pkgs.formats.json { }; - rootDir = - let - mappingsJson = mappingsFormat.generate "mappings.json" { mappings = cfg.mappings; }; - in - pkgs.runCommand "wiremock-root" - { - preferLocalBuild = true; - allowSubstitutes = false; - } - '' - mkdir -p $out - cd $out - - mkdir mappings - cp ${mappingsJson} mappings/mappings.json - - mkdir __files - ${lib.concatMapStrings (attrs: "cp ${attrs.path} __files/${attrs.name}") cfg.files} - ''; -in -{ - options.services."wiremock-${service}" = { - enable = mkEnableOption "WireMock"; - - port = mkOption { - type = types.int; - default = 8080; - }; - - verbose = mkOption { - type = types.bool; - default = false; - }; - - files = mkOption { - description = '' - List of files to include in the __files directory for access when stubbing. - ''; - default = [ ]; - example = { - name = "file-name.json"; - path = ""; - }; - }; - - mappings = mkOption { - type = mappingsFormat.type; - description = '' - See the for more information. - ''; - default = [ ]; - example = [ - { - request = { - method = "GET"; - url = "/body"; - }; - response = { - status = 200; - headers."Content-Type" = "text/plain"; - body = "Literal text to put in the body"; - }; - } - { - request = { - method = "GET"; - url = "/json"; - }; - response = { - status = 200; - headers."Content-Type" = "application/json"; - jsonBody = { - someField = "someValue"; - }; - }; - } - ]; - }; - }; - - config = mkIf cfg.enable { - systemd.services."wiremock-${service}" = - let - arguments = [ - "--port ${toString cfg.port}" - "--root-dir ${rootDir}" - "--disable-banner" - ] ++ lib.optional cfg.verbose "--verbose"; - in - { - description = "registry server"; - wantedBy = [ - "multi-user.target" - "nginx.service" - ]; - serviceConfig = { - ExecStart = "${pkgs.writeShellScriptBin "wiremock-${service}-init" '' - ${pkgs.wiremock}/bin/wiremock ${lib.concatStringsSep " " arguments} "$@" - ''}/bin/wiremock-${service}-init"; - }; - }; - }; -} diff --git a/scripts/default.nix b/scripts/default.nix deleted file mode 100644 index 43c043d15..000000000 --- a/scripts/default.nix +++ /dev/null @@ -1,82 +0,0 @@ -{ - makeWrapper, - lib, - stdenv, - esbuild, - nodejs, - writeText, - compilers, - purs-versions, - dhall, - dhall-json, - licensee, - git, - git-lfs, - coreutils, - gzip, - gnutar, - # from the registry at the top level - spago-lock, - package-lock, -}: -let - build-script = - name: module: - stdenv.mkDerivation rec { - inherit name; - src = ./src; - nativeBuildInputs = [ - esbuild - makeWrapper - ]; - buildInputs = [ nodejs ]; - entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.Scripts.${module}"; - main(); - ''; - buildPhase = '' - ln -s ${package-lock}/js/node_modules . - ln -s ${spago-lock}/output . - cp ${entrypoint} entrypoint.js - esbuild entrypoint.js --bundle --outfile=${name}.js --platform=node --packages=external - ''; - installPhase = '' - mkdir -p $out/bin - - echo "Copying files..." - cp ${name}.js $out/${name}.js - ln -s ${package-lock}/js/node_modules $out - - echo "Creating wrapper script..." 
- echo '#!/usr/bin/env sh' > $out/bin/${name} - echo 'exec ${nodejs}/bin/node '"$out/${name}.js"' "$@"' >> $out/bin/${name} - chmod +x $out/bin/${name} - ''; - postFixup = '' - wrapProgram $out/bin/${name} \ - --set PATH ${ - lib.makeBinPath [ - compilers - purs-versions - dhall - dhall-json - licensee - git - git-lfs - coreutils - gzip - gnutar - ] - } - ''; - }; -in -{ - legacy-importer = build-script "registry-legacy-importer" "LegacyImporter"; - package-deleter = build-script "registry-package-deleter" "PackageDeleter"; - package-set-updater = build-script "registry-package-set-updater" "PackageSetUpdater"; - package-transferrer = build-script "registry-package-transferrer" "PackageTransferrer"; - solver = build-script "registry-solver" "Solver"; - verify-integrity = build-script "registry-verify-integrity" "VerifyIntegrity"; - compiler-versions = build-script "registry-compiler-versions" "CompilerVersions"; -} diff --git a/shell.nix b/shell.nix deleted file mode 100644 index ceca7e4e4..000000000 --- a/shell.nix +++ /dev/null @@ -1,11 +0,0 @@ -# A compatibility file that allows non-flakes users to still get a development -# shell with `nix-shell`. -(import ( - let - lock = builtins.fromJSON (builtins.readFile ./flake.lock); - in - fetchTarball { - url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; - sha256 = lock.nodes.flake-compat.locked.narHash; - } -) { src = ./.; }).shellNix From fbd108aa2fdf30d9c91883bac5c314eafdfea797 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sat, 5 Jul 2025 23:27:56 -0400 Subject: [PATCH 6/6] Run job executor --- app/default.nix | 2 +- app/src/App/Main.purs | 139 ++++++++++++---------- app/src/App/Prelude.purs | 2 +- app/src/App/SQLite.purs | 6 +- app/src/App/{ => Server}/JobExecutor.purs | 22 ++-- app/src/App/Server/Router.purs | 55 +++++++-- flake.nix | 2 +- lib/src/API/V1.purs | 7 -- lib/src/JobType.purs | 1 + lib/src/Operation.purs | 3 +- 10 files changed, 141 insertions(+), 98 deletions(-) rename app/src/App/{ => Server}/JobExecutor.purs (82%) diff --git a/app/default.nix b/app/default.nix index 52c6be83c..536a1e423 100644 --- a/app/default.nix +++ b/app/default.nix @@ -56,7 +56,7 @@ in ]; buildInputs = [ nodejs ]; entrypoint = writeText "entrypoint.js" '' - import { main } from "./output/Registry.App.Server"; + import { main } from "./output/Registry.App.Main"; main(); ''; buildPhase = '' diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index f8bb129bd..19bfac1eb 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -2,87 +2,102 @@ module Registry.App.Main where import Registry.App.Prelude hiding ((/)) -import Data.String as String +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Debug (traceM) import Effect.Aff as Aff import Effect.Class.Console as Console import Fetch.Retry as Fetch.Retry -import HTTPurple (Request, Response) -import HTTPurple as HTTPurple import Node.Process as Process -import Registry.API.V1 (Route) -import Registry.API.V1 as V1 -import Registry.App.Server.Env (ServerEnv, createServerEnv, runEffects) +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor import Registry.App.Server.Router as Router main :: Effect Unit -main = +main = do + traceM 1 createServerEnv # Aff.runAff_ case _ of Left error -> do + traceM 2 Console.log $ "Failed to start server: " <> Aff.message error Process.exit' 1 Right env -> do - _healthcheck <- Aff.launchAff do - let - 
limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + traceM 3 + Aff.launchAff_ $ healthcheck env + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + healthcheck :: ServerEnv -> Aff Unit + healthcheck env = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - loop n = - Fetch.Retry.withRetryRequest env.vars.resourceEnv.healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n + loop n = do + traceM 4 + Fetch.Retry.withRetryRequest env.vars.resourceEnv.healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + traceM 5 + Aff.delay fiveMinutes + loop n - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." - Aff.delay oneMinute - loop (n - 1) + Cancelled | n >= 0 -> do + traceM 6 + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) + Failed error | n >= 0 -> do + traceM 7 + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) + Succeeded { status } | status /= 200, n >= 0 -> do + traceM 8 + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." + Cancelled -> do + traceM 9 + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + Failed error -> do + traceM 10 + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." + Succeeded _ -> do + traceM 11 + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." - loop limit + jobExecutor :: ServerEnv -> Aff Unit + jobExecutor env = do + traceM 12 + loop initialRestartDelay + where + initialRestartDelay = Milliseconds 100.0 - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port: 8080 - , onStarted - } - { route: V1.routes - , router: runServer env - } - pure unit - where - onStarted :: Effect Unit - onStarted = do - Console.log $ String.joinWith "\n" - [ " ┌───────────────────────────────────────────┐" - , " │ Server now up on port 8080 │" - , " │ │" - , " │ To test, run: │" - , " │ > curl -v localhost:8080/api/v1/jobs │" - , " └───────────────────────────────────────────┘" - ] + loop restartDelay = do + traceM 13 + start <- nowUTC + result <- JobExecutor.runJobExecutor env + end <- nowUTC + + traceM 14 + Console.error case result of + Left error -> "Job executor failed: " <> Aff.message error + Right _ -> "Job executor exited for no reason." 
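+
+      -- A note on the backoff below: `<>` on Milliseconds appends additively,
+      -- so combining the current delay with itself doubles it; starting from
+      -- the 100.0 ms initialRestartDelay the delays run 100 ms, 200 ms,
+      -- 400 ms, and so on until a run lasts longer than a minute.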
+ + -- This is a heuristic: if the executor keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. + let + nextRestartDelay + | end `diff` start > Seconds 60.0 = initialRestartDelay + | otherwise = restartDelay <> restartDelay - runServer :: ServerEnv -> Request Route -> Aff Response - runServer env request = do - result <- runEffects env (Router.router env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response + Aff.delay nextRestartDelay + loop nextRestartDelay diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index 7a046414d..5e586ebae 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -60,7 +60,7 @@ import Data.List (List) as Extra import Data.Map (Map) as Extra import Data.Map as Map import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe -import Data.Newtype (class Newtype, un) as Extra +import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra import Data.Newtype as Newtype import Data.Nullable (Nullable, toMaybe, toNullable) as Extra import Data.Set (Set) as Extra diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b56575757..208befb9a 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -248,7 +248,6 @@ insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< i type PackageJobDetails = { jobId :: JobId - , jobType :: JobType.JobType , packageName :: PackageName , packageVersion :: Version , payload :: PackageOperation @@ -258,7 +257,6 @@ type PackageJobDetails = type JSPackageJobDetails = { jobId :: String - , jobType :: String , packageName :: String , packageVersion :: String , payload :: String @@ -267,8 +265,7 @@ type JSPackageJobDetails = } packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails -packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do - ty <- JobType.parse jobType +packageJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt } = do name <- PackageName.parse packageName version <- Version.parse packageVersion created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt @@ -276,7 +273,6 @@ packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payloa parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload pure { jobId: JobId jobId - , jobType: ty , packageName: name , packageVersion: version , payload: parsed diff --git a/app/src/App/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs similarity index 82% rename from app/src/App/JobExecutor.purs rename to app/src/App/Server/JobExecutor.purs index e5d29bd95..125a9a7a3 100644 --- a/app/src/App/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,4 +1,4 @@ -module Registry.App.JobExecutor where +module Registry.App.Server.JobExecutor where import Registry.App.Prelude hiding ((/)) @@ -6,12 +6,13 @@ import Control.Parallel as Parallel import Data.DateTime (DateTime) import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff -import Registry.API.V1 (JobId(..)) +import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env 
(ServerEffects, ServerEnv, runEffects) +import Registry.Operation as Operation import Run (Run) import Run.Except (EXCEPT) @@ -21,7 +22,7 @@ data JobDetails | PackageSetJob PackageSetJobDetails findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = do +findNextAvailableJob = Db.selectNextPackageJob >>= case _ of Just job -> pure $ Just $ PackageJob job Nothing -> Db.selectNextMatrixJob >>= case _ of @@ -63,24 +64,27 @@ runJobExecutor env = runEffects env do success <- case jobResult of Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." + Log.error $ "Job " <> unwrap jobId <> " timed out." pure false Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err pure false Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." + Log.info $ "Job " <> unwrap jobId <> " succeeded." pure true Db.finishJob { jobId, finishedAt: now, success } loop executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit -executeJob now = case _ of - PackageJob { jobId } -> do - pure unit -- UNIMPLEMENTED +executeJob _ = case _ of + PackageJob { payload: Operation.Publish p } -> + API.publish Nothing p + PackageJob { payload: Operation.Authenticated auth } -> + API.authenticated auth + MatrixJob _details -> pure unit -- UNIMPLEMENTED PackageSetJob _details -> diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 27af29a24..350dcfb86 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,7 +4,10 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ +import Data.String as String import Data.UUID.Random as UUID +import Effect.Aff as Aff +import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status @@ -12,15 +15,44 @@ import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log -import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk) +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) import Registry.Operation (PackageOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName import Run (Run) import Run.Except as Run.Except -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of +runRouter :: ServerEnv -> Effect Unit +runRouter env = do + void $ HTTPurple.serve + { hostname: "0.0.0.0" + , port: 8080 + , onStarted + } + { route: V1.routes + , router: runServer + } + where + onStarted :: Effect Unit + onStarted = do + Console.log $ String.joinWith "\n" + [ " ┌───────────────────────────────────────────┐" + , " │ Server now up on port 8080 │" + , " │ │" + , " │ To test, run: │" + , " │ > curl -v localhost:8080/api/v1/jobs │" + , " └───────────────────────────────────────────┘" + ] + + runServer :: Request Route -> Aff Response + runServer request = do + result <- runEffects env (router request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of 
Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish @@ -45,22 +77,25 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of HTTPurple.badRequest "Expected transfer operation." Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] + jsonOk (CJ.array V1.jobCodec) [{ jobId: wrap "foo", createdAt: bottom, finishedAt: Nothing, success: true, logs: [] }] Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of + lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound Right Nothing -> HTTPurple.notFound - Right (Just job) -> do - HTTPurple.emptyResponse Status.ok - -- TODO: Return the job details (will need to update the jobCodec and move the various - -- details into the API module). - -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) + Right (Just job) -> + jsonOk V1.jobCodec + { jobId + , createdAt: job.createdAt + , finishedAt: job.finishedAt + , success: job.success + , logs + } Status, Get -> HTTPurple.emptyResponse Status.ok diff --git a/flake.nix b/flake.nix index 0857da8b4..ac341572a 100644 --- a/flake.nix +++ b/flake.nix @@ -790,7 +790,7 @@ # Give time for all the various services to come up... client.start() - client.wait_until_succeeds("${pkgs.curl}/bin/curl --fail-with-body http://registry/api/v1/jobs", timeout=20) + client.wait_until_succeeds("${pkgs.curl}/bin/curl --fail-with-body http://registry/api/v1/jobs", timeout=120) ########## # diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 31c15866c..4bae692f5 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,9 +15,6 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.JobType as JobType -import Registry.PackageName (PackageName) -import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG @@ -67,8 +64,6 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType.JobType - , packageName :: PackageName , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -78,8 +73,6 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: JobType.codec - , packageName: PackageName.codec , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs index b8dceaf38..dbc4eaf01 100644 --- a/lib/src/JobType.purs +++ b/lib/src/JobType.purs @@ -1,6 +1,7 @@ module Registry.JobType where import Prelude + import Data.Codec.JSON as CJ import Data.Codec.JSON.Sum as CJ.Sum import Data.Either (Either(..), hush) diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 518c1a6de..262ceb3db 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -30,8 +30,7 @@ module Registry.Operation , publishCodec , transferCodec , unpublishCodec - ) - where + ) where import Prelude