diff --git a/.env.example b/.env.example index 6a2bad0ff..928c8bd79 100644 --- a/.env.example +++ b/.env.example @@ -4,6 +4,7 @@ OTA_ENGINE_GITHUB_TOKEN=your_github_token_here OTA_ENGINE_GITLAB_TOKEN=your_gitlab_token_here OTA_ENGINE_GITLAB_RELEASES_TOKEN=your_gitlab_releases_token_here +OTA_ENGINE_DATAGOUV_API_KEY=your_datagouv_api_key_here OTA_ENGINE_SENDINBLUE_API_KEY=your_sendinblue_api_key_here OTA_ENGINE_SMTP_PASSWORD=your_smtp_password_here diff --git a/CHANGELOG.md b/CHANGELOG.md index 6601e83c0..b74b24d68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,15 @@ All changes that impact users of this module are documented in this file, in the [Common Changelog](https://common-changelog.org) format with some additional specifications defined in the CONTRIBUTING file. This codebase adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## Unreleased [minor] + +> Development of this release was supported by the [French Ministry for Foreign Affairs](https://www.diplomatie.gouv.fr/fr/politique-etrangere-de-la-france/diplomatie-numerique/) through its ministerial [State Startups incubator](https://beta.gouv.fr/startups/open-terms-archive.html) under the aegis of the Ambassador for Digital Affairs. 
+ +### Added + +- Add support for publishing datasets to data.gouv.fr; configure `dataset.datagouv.datasetId` or `dataset.datagouv.organizationIdOrSlug` in configuration file and set `OTA_ENGINE_DATAGOUV_API_KEY` environment variable +- Add ability to publish datasets to multiple platforms simultaneously; datasets can now be published to GitHub (or GitLab) and data.gouv.fr in parallel + ## 10.0.1 - 2025-11-24 _Full changeset and discussions: [#1208](https://github.com/OpenTermsArchive/engine/pull/1208)._ diff --git a/bin/ota-dataset.js b/bin/ota-dataset.js index 060649187..43328f6d8 100755 --- a/bin/ota-dataset.js +++ b/bin/ota-dataset.js @@ -11,9 +11,9 @@ import logger from '../src/logger/index.js'; program .name('ota dataset') - .description('Export the versions dataset into a ZIP file and optionally publish it to GitHub releases') + .description('Export the versions dataset into a ZIP file and optionally publish it to GitHub releases, GitLab releases, or data.gouv.fr') .option('-f, --file ', 'file name of the generated dataset') - .option('-p, --publish', 'publish dataset to GitHub releases on versions repository. Mandatory authentication to GitHub is provided through the `OTA_ENGINE_GITHUB_TOKEN` environment variable') + .option('-p, --publish', 'publish dataset. Supports GitHub releases (OTA_ENGINE_GITHUB_TOKEN), GitLab releases (OTA_ENGINE_GITLAB_TOKEN), or data.gouv.fr (OTA_ENGINE_DATAGOUV_API_KEY + config)') .option('-r, --remove-local-copy', 'remove local copy of dataset after publishing. 
Works only in combination with --publish option') .option('--schedule', 'schedule automatic dataset generation'); diff --git a/config/default.json b/config/default.json index 637e9dafb..3cac8ea15 100644 --- a/config/default.json +++ b/config/default.json @@ -56,7 +56,7 @@ } }, "dataset": { - "title": "sandbox", + "title": "Sandbox collection dataset", "versionsRepositoryURL": "https://github.com/OpenTermsArchive/sandbox-declarations", "publishingSchedule": "30 8 * * MON" } diff --git a/scripts/dataset/assets/README.template.js b/scripts/dataset/assets/README.template.js index 1c63d117c..abfc87e6b 100644 --- a/scripts/dataset/assets/README.template.js +++ b/scripts/dataset/assets/README.template.js @@ -14,7 +14,7 @@ export function title({ releaseDate }) { const title = config.get('@opentermsarchive/engine.dataset.title'); - return `${title} — ${releaseDate} dataset`; + return `${title} — ${releaseDate}`; } export function body({ servicesCount, firstVersionDate, lastVersionDate }) { diff --git a/scripts/dataset/export/test/fixtures/dataset/README.md b/scripts/dataset/export/test/fixtures/dataset/README.md index b186c3829..16cd5ec89 100644 --- a/scripts/dataset/export/test/fixtures/dataset/README.md +++ b/scripts/dataset/export/test/fixtures/dataset/README.md @@ -1,4 +1,4 @@ -# Open Terms Archive — sandbox — January 1, 2022 dataset +# Open Terms Archive — sandbox — January 1, 2022 This dataset consolidates the contractual documents of 2 service providers, in all their versions that were accessible online between January 1, 2021 and January 6, 2022. 
diff --git a/scripts/dataset/index.js b/scripts/dataset/index.js index 4c739686d..67390d95e 100644 --- a/scripts/dataset/index.js +++ b/scripts/dataset/index.js @@ -9,7 +9,7 @@ import publishRelease from './publish/index.js'; export async function release({ shouldPublish, shouldRemoveLocalCopy, fileName }) { const releaseDate = new Date(); - const archiveName = fileName || `dataset-${config.get('@opentermsarchive/engine.dataset.title')}-${releaseDate.toISOString().replace(/T.*/, '')}`; + const archiveName = fileName || `${config.get('@opentermsarchive/engine.dataset.title').toLowerCase().replace(/[^a-zA-Z0-9.\-_]/g, '-')}-${releaseDate.toISOString().replace(/T.*/, '')}`; const archivePath = `${path.basename(archiveName, '.zip')}.zip`; // allow to pass filename or filename.zip as the archive name and have filename.zip as the result name logger.info('Start exporting dataset…'); @@ -24,13 +24,18 @@ export async function release({ shouldPublish, shouldRemoveLocalCopy, fileName } logger.info('Start publishing dataset…'); - const releaseUrl = await publishRelease({ + const results = await publishRelease({ archivePath, releaseDate, stats, }); - logger.info(`Dataset published to ${releaseUrl}`); + if (results.length > 0) { + logger.info('Dataset published to following platforms:'); + results.forEach(result => { + logger.info(` - ${result.platform}: ${result.url}`); + }); + } if (!shouldRemoveLocalCopy) { return; diff --git a/scripts/dataset/logger/index.js b/scripts/dataset/logger/index.js index 802eb7cb9..dc14b7a4d 100644 --- a/scripts/dataset/logger/index.js +++ b/scripts/dataset/logger/index.js @@ -8,13 +8,35 @@ const { combine, timestamp, printf, colorize } = winston.format; logger.format = combine( colorize(), timestamp({ format: 'YYYY-MM-DDTHH:mm:ssZ' }), - printf(({ level, message, counter, hash, timestamp }) => { - const prefix = counter && hash ? 
`${counter.toString().padEnd(6)} ${hash.padEnd(40)}` : ''; + printf(({ level, message, counter, hash, timestamp, module }) => { + let prefix = counter && hash ? `${counter.toString().padEnd(6)} ${hash.padEnd(40)}` : ''; const timestampPrefix = config.get('@opentermsarchive/engine.logger.timestampPrefix') ? `${timestamp} ` : ''; - return `${timestampPrefix}${level.padEnd(15)} ${prefix.padEnd(50)} ${message}`; + prefix = module ? `${module} ${prefix}` : prefix; + + const levelStr = level.padEnd(15); + let coloredLevel = levelStr; + let coloredMessage = message; + + if (level.includes('warn')) { + coloredLevel = `\x1b[33m${levelStr}\x1b[0m`; + coloredMessage = `\x1b[33m${message}\x1b[0m`; + } else if (level.includes('error')) { + coloredLevel = `\x1b[31m${levelStr}\x1b[0m`; + coloredMessage = `\x1b[31m${message}\x1b[0m`; + } + + return `${timestampPrefix} ${coloredLevel} ${prefix.padEnd(50)} ${coloredMessage}`; }), ); +export function createModuleLogger(moduleName) { + return { + info: message => logger.info(message, { module: moduleName }), + warn: message => logger.warn(message, { module: moduleName }), + error: message => logger.error(message, { module: moduleName }), + }; +} + export default logger; diff --git a/scripts/dataset/publish/datagouv/dataset.js b/scripts/dataset/publish/datagouv/dataset.js new file mode 100644 index 000000000..88c9652bf --- /dev/null +++ b/scripts/dataset/publish/datagouv/dataset.js @@ -0,0 +1,234 @@ +import fsApi from 'fs'; +import path from 'path'; + +import FormData from 'form-data'; +import nodeFetch from 'node-fetch'; + +import { createModuleLogger } from '../../logger/index.js'; + +const logger = createModuleLogger('datagouv'); + +const DATASET_LICENSE = 'odc-odbl'; +const DEFAULT_RESOURCE_DESCRIPTION = 'See README.md inside the archive for dataset structure and usage information.'; + +const routes = { + dataset: (apiBaseUrl, datasetId) => `${apiBaseUrl}/datasets/${datasetId}/`, + datasets: apiBaseUrl => `${apiBaseUrl}/datasets/`, 
+ datasetUpload: (apiBaseUrl, datasetId) => `${apiBaseUrl}/datasets/${datasetId}/upload/`, + resource: (apiBaseUrl, datasetId, resourceId) => `${apiBaseUrl}/datasets/${datasetId}/resources/${resourceId}/`, + resourceUpload: (apiBaseUrl, datasetId, resourceId) => `${apiBaseUrl}/datasets/${datasetId}/resources/${resourceId}/upload/`, + organization: (apiBaseUrl, organizationIdOrSlug) => `${apiBaseUrl}/organizations/${organizationIdOrSlug}/`, + organizationDatasets: (apiBaseUrl, organizationId) => `${apiBaseUrl}/organizations/${organizationId}/datasets/?page_size=100`, +}; + +export async function getOrganization({ apiBaseUrl, headers, organizationIdOrSlug }) { + logger.info(`Fetching organization: ${organizationIdOrSlug}…`); + + const orgResponse = await nodeFetch(routes.organization(apiBaseUrl, organizationIdOrSlug), { headers }); + + if (!orgResponse.ok) { + const errorText = await orgResponse.text(); + + throw new Error(`Failed to retrieve organization: ${orgResponse.status} ${orgResponse.statusText} - ${errorText}`); + } + + const orgData = await orgResponse.json(); + + logger.info(`Found organization: ${orgData.name} (ID: ${orgData.id})`); + + return orgData; +} + +export async function getDataset({ apiBaseUrl, headers, datasetId }) { + const datasetResponse = await nodeFetch(routes.dataset(apiBaseUrl, datasetId), { headers }); + + if (!datasetResponse.ok) { + const errorText = await datasetResponse.text(); + const error = new Error(`Failed to retrieve dataset: ${datasetResponse.status} ${datasetResponse.statusText} - ${errorText}`); + + error.statusCode = datasetResponse.status; + throw error; + } + + const datasetData = await datasetResponse.json(); + + return datasetData; +} + +export async function findDatasetByTitle({ apiBaseUrl, headers, organizationId, title }) { + logger.info(`Searching for dataset with title "${title}" in organization…`); + + const searchResponse = await nodeFetch(routes.organizationDatasets(apiBaseUrl, organizationId), { headers }); + 
+ if (!searchResponse.ok) { + const errorText = await searchResponse.text(); + + throw new Error(`Failed to search for datasets: ${searchResponse.status} ${searchResponse.statusText} - ${errorText}`); + } + + const searchData = await searchResponse.json(); + + const dataset = searchData.data.find(ds => ds.title === title); + + if (dataset) { + logger.info(`Found existing dataset: ${dataset.title} (ID: ${dataset.id})`); + + return dataset; + } + + logger.info('No existing dataset found with this title'); + + return null; +} + +export async function createDataset({ apiBaseUrl, headers, organizationId, title, description, license, frequency }) { + logger.info(`Creating new dataset: ${title}…`); + + const createResponse = await nodeFetch(routes.datasets(apiBaseUrl), { + method: 'POST', + headers: { + ...headers, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + title, + description, + organization: organizationId, + license, + frequency, + }), + }); + + if (!createResponse.ok) { + const errorText = await createResponse.text(); + + throw new Error(`Failed to create dataset: ${createResponse.status} ${createResponse.statusText} - ${errorText}`); + } + + const dataset = await createResponse.json(); + + logger.info(`Dataset created successfully: ${dataset.title} (ID: ${dataset.id})`); + + return dataset; +} + +export async function updateDatasetMetadata({ apiBaseUrl, headers, datasetId, title, description, stats, frequency }) { + const updatePayload = { + title, + description, + license: DATASET_LICENSE, + frequency, + }; + + if (stats?.firstVersionDate && stats?.lastVersionDate) { + updatePayload.temporal_coverage = { + start: stats.firstVersionDate.toISOString(), + end: stats.lastVersionDate.toISOString(), + }; + } + + const updateResponse = await nodeFetch(routes.dataset(apiBaseUrl, datasetId), { + method: 'PUT', + headers: { + ...headers, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(updatePayload), + }); + + if 
(!updateResponse.ok) { + const errorText = await updateResponse.text(); + const error = new Error(`Failed to update dataset metadata: ${updateResponse.status} ${updateResponse.statusText} - ${errorText}`); + + error.statusCode = updateResponse.status; + throw error; + } + + logger.info('Dataset metadata updated successfully'); +} + +export async function uploadResource({ apiBaseUrl, headers, datasetId, archivePath }) { + logger.info('Uploading dataset archive…'); + + const { formData, fileName } = createFormDataForFile(archivePath); + + const uploadResponse = await nodeFetch(routes.datasetUpload(apiBaseUrl, datasetId), { + method: 'POST', + headers: { ...formData.getHeaders(), ...headers }, + body: formData, + }); + + if (!uploadResponse.ok) { + const errorText = await uploadResponse.text(); + + throw new Error(`Failed to upload dataset file: ${uploadResponse.status} ${uploadResponse.statusText} - ${errorText}`); + } + + const uploadResult = await uploadResponse.json(); + + logger.info(`Dataset file uploaded successfully with resource ID: ${uploadResult.id}`); + + return { resourceId: uploadResult.id, fileName }; +} + +export async function replaceResourceFile({ apiBaseUrl, headers, datasetId, resourceId, archivePath }) { + logger.info(`Replacing file for existing resource ID: ${resourceId}…`); + + const { formData, fileName } = createFormDataForFile(archivePath); + + const uploadResponse = await nodeFetch(routes.resourceUpload(apiBaseUrl, datasetId, resourceId), { + method: 'POST', + headers: { ...formData.getHeaders(), ...headers }, + body: formData, + }); + + if (!uploadResponse.ok) { + const errorText = await uploadResponse.text(); + + throw new Error(`Failed to replace resource file: ${uploadResponse.status} ${uploadResponse.statusText} - ${errorText}`); + } + + const uploadResult = await uploadResponse.json(); + + logger.info('Resource file replaced successfully'); + + return { resourceId: uploadResult.id, fileName }; +} + +export async function 
updateResourceMetadata({ apiBaseUrl, headers, datasetId, resourceId, fileName }) { + logger.info('Updating resource metadata…'); + + const resourceUpdateResponse = await nodeFetch(routes.resource(apiBaseUrl, datasetId, resourceId), { + method: 'PUT', + headers: { ...headers, 'Content-Type': 'application/json' }, + body: JSON.stringify({ + title: fileName, + description: DEFAULT_RESOURCE_DESCRIPTION, + filetype: 'file', + format: 'zip', + mime: 'application/zip', + }), + }); + + if (!resourceUpdateResponse.ok) { + const errorText = await resourceUpdateResponse.text(); + + throw new Error(`Failed to update resource metadata: ${resourceUpdateResponse.status} ${resourceUpdateResponse.statusText} - ${errorText}`); + } + + logger.info('Resource metadata updated successfully'); +} + +function createFormDataForFile(archivePath) { + const formData = new FormData(); + const fileName = path.basename(archivePath); + const fileStats = fsApi.statSync(archivePath); + + formData.append('file', fsApi.createReadStream(archivePath), { + filename: fileName, + contentType: 'application/zip', + knownLength: fileStats.size, + }); + + return { formData, fileName }; +} diff --git a/scripts/dataset/publish/datagouv/index.js b/scripts/dataset/publish/datagouv/index.js new file mode 100644 index 000000000..c41cf7f41 --- /dev/null +++ b/scripts/dataset/publish/datagouv/index.js @@ -0,0 +1,82 @@ +import config from 'config'; + +import * as readme from '../../assets/README.template.js'; +import { createModuleLogger } from '../../logger/index.js'; + +import { updateDatasetMetadata, uploadResource, replaceResourceFile, updateResourceMetadata, getDataset, getOrganization, findDatasetByTitle, createDataset } from './dataset.js'; + +const logger = createModuleLogger('datagouv'); + +const PRODUCTION_API_BASE_URL = 'https://www.data.gouv.fr/api/1'; +const DEMO_API_BASE_URL = 'https://demo.data.gouv.fr/api/1'; +const DATASET_LICENSE = 'odc-odbl'; + +export default async function publish({ archivePath, 
stats }) { + const { datasetId, organizationIdOrSlug, apiBaseUrl, headers, datasetTitle, frequency } = loadConfiguration(); + const description = readme.body(stats); + + const dataset = datasetId + ? await getDataset({ apiBaseUrl, headers, datasetId }) + : await ensureDatasetExists({ apiBaseUrl, headers, organizationIdOrSlug, datasetTitle, description, frequency }); + + await updateDatasetMetadata({ apiBaseUrl, headers, datasetId: dataset.id, title: datasetTitle, description, stats, frequency }); + + const { resourceId, fileName } = await handleResourceUpload({ apiBaseUrl, headers, datasetId: dataset.id, dataset, archivePath }); + + await updateResourceMetadata({ apiBaseUrl, headers, datasetId: dataset.id, resourceId, fileName }); + + logger.info(`Dataset published successfully: ${dataset.page}`); + + return dataset.page; +} + +function loadConfiguration() { + const apiKey = process.env.OTA_ENGINE_DATAGOUV_API_KEY; + + if (!apiKey) { + throw new Error('OTA_ENGINE_DATAGOUV_API_KEY environment variable is required for data.gouv.fr publishing'); + } + + const datasetId = config.has('@opentermsarchive/engine.dataset.datagouv.datasetId') && config.get('@opentermsarchive/engine.dataset.datagouv.datasetId'); + const organizationIdOrSlug = config.has('@opentermsarchive/engine.dataset.datagouv.organizationIdOrSlug') && config.get('@opentermsarchive/engine.dataset.datagouv.organizationIdOrSlug'); + + if (!datasetId && !organizationIdOrSlug) { + throw new Error('Either datasetId or organizationIdOrSlug is required in config at @opentermsarchive/engine.dataset.datagouv'); + } + + const datasetTitle = config.get('@opentermsarchive/engine.dataset.title'); + const frequency = config.has('@opentermsarchive/engine.dataset.datagouv.frequency') && config.get('@opentermsarchive/engine.dataset.datagouv.frequency'); + const useDemo = config.has('@opentermsarchive/engine.dataset.datagouv.useDemo') && config.get('@opentermsarchive/engine.dataset.datagouv.useDemo'); + const apiBaseUrl = 
useDemo ? DEMO_API_BASE_URL : PRODUCTION_API_BASE_URL; + + if (useDemo) { + logger.warn('Using demo.data.gouv.fr environment for testing'); + } + + const headers = { 'X-API-KEY': apiKey }; + + return { datasetId, organizationIdOrSlug, apiBaseUrl, headers, datasetTitle, frequency }; +} + +async function ensureDatasetExists({ apiBaseUrl, headers, organizationIdOrSlug, datasetTitle, description, frequency }) { + const organization = await getOrganization({ apiBaseUrl, headers, organizationIdOrSlug }); + let dataset = await findDatasetByTitle({ apiBaseUrl, headers, organizationId: organization.id, title: datasetTitle }); + + if (!dataset) { + dataset = await createDataset({ apiBaseUrl, headers, organizationId: organization.id, title: datasetTitle, description, license: DATASET_LICENSE, frequency }); + } + + return dataset; +} + +function handleResourceUpload({ apiBaseUrl, headers, datasetId, dataset, archivePath }) { + if (dataset?.resources?.length > 0) { + const existingResource = dataset.resources[0]; + + logger.info(`Found existing resource: ${existingResource.title} (ID: ${existingResource.id})`); + + return replaceResourceFile({ apiBaseUrl, headers, datasetId, resourceId: existingResource.id, archivePath }); + } + + return uploadResource({ apiBaseUrl, headers, datasetId, archivePath }); +} diff --git a/scripts/dataset/publish/github/index.js b/scripts/dataset/publish/github/index.js index 6d83ac47a..5aaec8ad5 100644 --- a/scripts/dataset/publish/github/index.js +++ b/scripts/dataset/publish/github/index.js @@ -1,19 +1,23 @@ import fsApi from 'fs'; import path from 'path'; -import url from 'url'; import config from 'config'; import { Octokit } from 'octokit'; // eslint-disable-line import/no-unresolved import * as readme from '../../assets/README.template.js'; +import { createModuleLogger } from '../../logger/index.js'; + +const logger = createModuleLogger('github'); export default async function publish({ archivePath, releaseDate, stats }) { const octokit = new 
Octokit({ auth: process.env.OTA_ENGINE_GITHUB_TOKEN }); - const [ owner, repo ] = url.parse(config.get('@opentermsarchive/engine.dataset.versionsRepositoryURL')).pathname.split('/').filter(component => component); + const [ owner, repo ] = new URL(config.get('@opentermsarchive/engine.dataset.versionsRepositoryURL')).pathname.split('/').filter(component => component); const tagName = `${path.basename(archivePath, path.extname(archivePath))}`; // use archive filename as Git tag + logger.info(`Creating release for ${owner}/${repo}…`); + const { data: { upload_url: uploadUrl, html_url: releaseUrl } } = await octokit.rest.repos.createRelease({ owner, repo, @@ -22,6 +26,9 @@ export default async function publish({ archivePath, releaseDate, stats }) { body: readme.body(stats), }); + logger.info(`Release created successfully with tag: ${tagName}`); + logger.info('Uploading release asset…'); + await octokit.rest.repos.uploadReleaseAsset({ data: fsApi.readFileSync(archivePath), headers: { @@ -32,5 +39,7 @@ export default async function publish({ archivePath, releaseDate, stats }) { url: uploadUrl, }); + logger.info(`Release asset uploaded successfully: ${path.basename(archivePath)}`); + return releaseUrl; } diff --git a/scripts/dataset/publish/gitlab/index.js b/scripts/dataset/publish/gitlab/index.js index ba8f2f3d9..7e45a907c 100644 --- a/scripts/dataset/publish/gitlab/index.js +++ b/scripts/dataset/publish/gitlab/index.js @@ -8,7 +8,9 @@ import nodeFetch from 'node-fetch'; import GitLab from '../../../../src/reporter/gitlab/index.js'; import * as readme from '../../assets/README.template.js'; -import logger from '../../logger/index.js'; +import { createModuleLogger } from '../../logger/index.js'; + +const logger = createModuleLogger('gitlab'); dotenv.config({ quiet: true }); diff --git a/scripts/dataset/publish/index.js b/scripts/dataset/publish/index.js index 6ed8ead0f..79f752a12 100644 --- a/scripts/dataset/publish/index.js +++ b/scripts/dataset/publish/index.js @@ -1,15 
+1,49 @@ +import config from 'config'; + +import logger from '../logger/index.js'; + +import publishDataGouv from './datagouv/index.js'; import publishGitHub from './github/index.js'; import publishGitLab from './gitlab/index.js'; -export default function publishRelease({ archivePath, releaseDate, stats }) { +export default async function publishRelease({ archivePath, releaseDate, stats }) { + const platforms = []; + // If both GitHub and GitLab tokens are defined, GitHub takes precedence if (process.env.OTA_ENGINE_GITHUB_TOKEN) { - return publishGitHub({ archivePath, releaseDate, stats }); + platforms.push({ name: 'GitHub', publish: () => publishGitHub({ archivePath, releaseDate, stats }) }); + } else if (process.env.OTA_ENGINE_GITLAB_TOKEN) { + platforms.push({ name: 'GitLab', publish: () => publishGitLab({ archivePath, releaseDate, stats }) }); + } + + if (process.env.OTA_ENGINE_DATAGOUV_API_KEY && (config.has('@opentermsarchive/engine.dataset.datagouv.datasetId') || config.has('@opentermsarchive/engine.dataset.datagouv.organizationIdOrSlug'))) { + platforms.push({ name: 'data.gouv.fr', publish: () => publishDataGouv({ archivePath, releaseDate, stats }) }); + } + + if (!platforms.length) { + throw new Error('No publishing platform configured. Please configure at least one of: GitHub (OTA_ENGINE_GITHUB_TOKEN), GitLab (OTA_ENGINE_GITLAB_TOKEN), or data.gouv.fr (OTA_ENGINE_DATAGOUV_API_KEY + datasetId or organizationIdOrSlug in config).'); } - if (process.env.OTA_ENGINE_GITLAB_TOKEN) { - return publishGitLab({ archivePath, releaseDate, stats }); + const results = await Promise.allSettled(platforms.map(async platform => { + const url = await platform.publish(); + + return { platform: platform.name, url }; + })); + + const succeeded = results.filter(result => result.status === 'fulfilled'); + const failed = results.filter(result => result.status === 'rejected'); + + if (failed.length) { + let errorMessage = !succeeded.length ? 
'All platforms failed to publish:' : 'Some platforms failed to publish:'; + + failed.forEach(rejectedResult => { + const index = results.indexOf(rejectedResult); + + errorMessage += `\n - ${platforms[index].name}: ${rejectedResult.reason.message}`; + }); + + logger.error(errorMessage); if (!succeeded.length) throw new Error(errorMessage); + } - throw new Error('No GitHub nor GitLab token found in environment variables (OTA_ENGINE_GITHUB_TOKEN or OTA_ENGINE_GITLAB_TOKEN). Cannot publish the dataset without authentication.'); + return succeeded.map(result => result.value); }