diff --git a/nextflow/module.properties b/nextflow/module.properties
index be867ce9..e79c9076 100644
--- a/nextflow/module.properties
+++ b/nextflow/module.properties
@@ -5,4 +5,4 @@ Description: This module provides the functionality \
License: Apache 2.0
LicenseURL: http://www.apache.org/licenses/LICENSE-2.0
SupportedDatabases: pgsql
-ManageVersion: false
+ManageVersion: true
diff --git a/nextflow/resources/schemas/dbscripts/nextflow.xml b/nextflow/resources/schemas/dbscripts/nextflow.xml
new file mode 100644
index 00000000..01709064
--- /dev/null
+++ b/nextflow/resources/schemas/dbscripts/nextflow.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Copyright (c) 2025 LabKey Corporation
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<tables xmlns="http://labkey.org/data/xml">
+    <table tableName="Job" tableDbType="TABLE">
+        <description>
+            Invocation counts to ensure unique NextFlow run names
+        </description>
+        <columns>
+            <column columnName="JobId"/>
+            <column columnName="InvocationCount"/>
+        </columns>
+    </table>
+</tables>
\ No newline at end of file
diff --git a/nextflow/resources/schemas/dbscripts/postgresql/nextflow-0.000-25.000.sql b/nextflow/resources/schemas/dbscripts/postgresql/nextflow-0.000-25.000.sql
new file mode 100644
index 00000000..5f6a43a3
--- /dev/null
+++ b/nextflow/resources/schemas/dbscripts/postgresql/nextflow-0.000-25.000.sql
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2025 LabKey Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+CREATE SCHEMA nextflow;
+
+CREATE TABLE nextflow.Job
+(
+ JobId INTEGER NOT NULL,
+ InvocationCount INTEGER NOT NULL,
+ CONSTRAINT PK_Job PRIMARY KEY (JobId),
+ CONSTRAINT FK_Job_JobId FOREIGN KEY (JobId) REFERENCES pipeline.StatusFiles (RowId) ON DELETE CASCADE -- Automatically clean up when a job is deleted
+);
diff --git a/nextflow/src/org/labkey/nextflow/NextFlowController.java b/nextflow/src/org/labkey/nextflow/NextFlowController.java
index ea758886..736b8e4f 100644
--- a/nextflow/src/org/labkey/nextflow/NextFlowController.java
+++ b/nextflow/src/org/labkey/nextflow/NextFlowController.java
@@ -3,6 +3,7 @@
import lombok.Getter;
import lombok.Setter;
import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.Logger;
import org.labkey.api.action.ApiResponse;
import org.labkey.api.action.ApiSimpleResponse;
import org.labkey.api.action.FormViewAction;
@@ -12,7 +13,6 @@
import org.labkey.api.data.PropertyManager;
import org.labkey.api.data.PropertyStore;
import org.labkey.api.pipeline.PipeRoot;
-import org.labkey.api.pipeline.PipelineJob;
import org.labkey.api.pipeline.PipelineProvider;
import org.labkey.api.pipeline.PipelineService;
import org.labkey.api.pipeline.PipelineStatusUrls;
@@ -31,6 +31,7 @@
import org.labkey.api.util.Path;
import org.labkey.api.util.URLHelper;
import org.labkey.api.util.element.Select;
+import org.labkey.api.util.logging.LogHelper;
import org.labkey.api.view.HtmlView;
import org.labkey.api.view.JspView;
import org.labkey.api.view.NavTree;
@@ -64,6 +65,8 @@ public class NextFlowController extends SpringActionController
private static final DefaultActionResolver _actionResolver = new DefaultActionResolver(NextFlowController.class);
public static final String NAME = "nextflow";
+ protected static final Logger LOG = LogHelper.getLogger(NextFlowController.class, "LabKey UI and API for NextFlow usage");
+
public NextFlowController()
{
setActionResolver(_actionResolver);
@@ -326,8 +329,9 @@ public boolean handlePost(AnalyzeForm form, BindException errors) throws Excepti
{
ViewBackgroundInfo info = getViewBackgroundInfo();
PipeRoot root = PipelineService.get().findPipelineRoot(info.getContainer());
- PipelineJob job = NextFlowPipelineJob.create(info, root, configFile.toPath(), inputFiles.stream().map(File::toPath).toList());
+ NextFlowPipelineJob job = NextFlowPipelineJob.create(info, root, configFile.toPath(), inputFiles.stream().map(File::toPath).toList());
PipelineService.get().queueJob(job);
+ LOG.info("NextFlow job queued: {}", job.getJsonJobInfo(false));
}
}
diff --git a/nextflow/src/org/labkey/nextflow/NextFlowManager.java b/nextflow/src/org/labkey/nextflow/NextFlowManager.java
index e560e413..a1b115c7 100644
--- a/nextflow/src/org/labkey/nextflow/NextFlowManager.java
+++ b/nextflow/src/org/labkey/nextflow/NextFlowManager.java
@@ -3,8 +3,16 @@
import org.apache.commons.lang3.StringUtils;
import org.labkey.api.data.Container;
import org.labkey.api.data.CoreSchema;
+import org.labkey.api.data.DbSchema;
+import org.labkey.api.data.DbSchemaType;
import org.labkey.api.data.DbScope;
import org.labkey.api.data.PropertyManager;
+import org.labkey.api.data.SQLFragment;
+import org.labkey.api.data.SqlExecutor;
+import org.labkey.api.data.SqlSelector;
+import org.labkey.api.pipeline.PipelineService;
+import org.labkey.api.pipeline.PipelineStatusFile;
+import org.labkey.nextflow.pipeline.NextFlowPipelineJob;
import org.springframework.validation.BindException;
import java.nio.file.Files;
@@ -27,6 +35,8 @@ public class NextFlowManager
private static final String NEXTFLOW_S3_BUCKET_PATH = "s3BucketPath";
private static final String NEXTFLOW_API_KEY = "apiKey";
+ public static final String SCHEMA_NAME = "nextflow";
+
private static final String IS_NEXTFLOW_ENABLED = "enabled";
private static final NextFlowManager _instance = new NextFlowManager();
@@ -158,4 +168,42 @@ public void saveEnabledState(Container container, Boolean enabled)
map.save();
}
}
+
+ private DbSchema getDbSchema()
+ {
+ return DbSchema.get(SCHEMA_NAME, DbSchemaType.Module);
+ }
+
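+ // Resolve the RowId of this job's row in pipeline.StatusFiles; null if no status record exists yet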
+ private Integer getJobId(NextFlowPipelineJob job)
+ {
+ PipelineStatusFile file = PipelineService.get().getStatusFile(job.getJobGUID());
+ return file == null ? null : file.getRowId();
+ }
+
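+ // Number of times NextFlow has been invoked for this job; 0 if it has not been run yet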
+ public int getInvocationCount(NextFlowPipelineJob job)
+ {
+ return getInvocationCount(getJobId(job));
+ }
+
+ private int getInvocationCount(int jobId)
+ {
+ Integer result = new SqlSelector(getDbSchema(), new SQLFragment("SELECT InvocationCount FROM nextflow.Job WHERE JobId = ?", jobId)).getObject(Integer.class);
+ return result != null ? result.intValue() : 0;
+ }
+
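+ // Bump and persist the per-job invocation counter, inserting the nextflow.Job row on first use, and return the new value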
+ public int incrementInvocationCount(NextFlowPipelineJob job)
+ {
+ int jobId = getJobId(job);
+ int current = getInvocationCount(jobId);
+ current++;
+ if (current == 1)
+ {
+ new SqlExecutor(getDbSchema()).execute(new SQLFragment("INSERT INTO nextflow.Job (JobId, InvocationCount) VALUES (?, ?)", jobId, current));
+ }
+ else
+ {
+ new SqlExecutor(getDbSchema()).execute(new SQLFragment("UPDATE nextflow.Job SET InvocationCount = ? WHERE JobId = ?", current, jobId));
+ }
+ return current;
+ }
}
diff --git a/nextflow/src/org/labkey/nextflow/NextFlowModule.java b/nextflow/src/org/labkey/nextflow/NextFlowModule.java
index 46853d27..83dac6fc 100644
--- a/nextflow/src/org/labkey/nextflow/NextFlowModule.java
+++ b/nextflow/src/org/labkey/nextflow/NextFlowModule.java
@@ -1,6 +1,7 @@
package org.labkey.nextflow;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import org.labkey.api.data.ContainerManager;
import org.labkey.api.module.ModuleContext;
import org.labkey.api.module.SpringModule;
@@ -40,13 +41,18 @@ protected void init()
@Override
public boolean hasScripts()
{
- return false;
+ return true;
}
@Override
- public @NotNull Collection<String> getSchemaNames()
+ public @Nullable Double getSchemaVersion()
{
- return List.of();
+ return 25.000;
}
+ @Override
+ public @NotNull Collection<String> getSchemaNames()
+ {
+ return List.of(NextFlowManager.SCHEMA_NAME);
+ }
}
diff --git a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java
index 8a3cd0c0..137a9dca 100644
--- a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java
+++ b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java
@@ -20,6 +20,7 @@
import org.labkey.api.util.StringUtilsLabKey;
import org.labkey.api.util.logging.LogHelper;
import org.labkey.api.view.ViewBackgroundInfo;
+import org.labkey.nextflow.NextFlowManager;
import java.io.BufferedWriter;
import java.io.File;
@@ -59,25 +60,24 @@ public NextFlowPipelineJob(ViewBackgroundInfo info, @NotNull PipeRoot root, Path
super(new NextFlowProtocol(), NextFlowPipelineProvider.NAME, info, root, config.getFileName().toString(), config, inputFiles, false, false);
this.config = config;
setLogFile(log);
- LOG.info("NextFlow job queued: {}", getJsonJobInfo(null));
}
- protected JSONObject getJsonJobInfo(Long invocationCount)
+ public JSONObject getJsonJobInfo(boolean includeInvocationCount)
{
JSONObject result = new JSONObject();
result.put("user", getUser().getEmail());
result.put("container", getContainer().getPath());
result.put("filePath", getLogFilePath().getParent().toString());
- result.put("runName", getNextFlowRunName(invocationCount));
+ result.put("runName", getNextFlowRunName(includeInvocationCount));
result.put("configFile", getConfig().getFileName().toString());
return result;
}
- protected String getNextFlowRunName(Long invocationCount)
+ protected String getNextFlowRunName(boolean includeInvocationCount)
{
PipelineStatusFile file = PipelineService.get().getStatusFile(getJobGUID());
String result = file == null ? "Unknown" : ("LabKeyJob" + file.getRowId());
- result += invocationCount == null ? "" : ("_" + invocationCount);
+ result += includeInvocationCount ? ("_" + NextFlowManager.get().getInvocationCount(this)) : "";
return result;
}
diff --git a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java
index 2749796f..1a64acc9 100644
--- a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java
+++ b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java
@@ -2,9 +2,6 @@
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
-import org.labkey.api.data.ContainerManager;
-import org.labkey.api.data.DbSequence;
-import org.labkey.api.data.DbSequenceManager;
import org.labkey.api.exp.XarFormatException;
import org.labkey.api.pipeline.AbstractTaskFactory;
import org.labkey.api.pipeline.AbstractTaskFactorySettings;
@@ -40,8 +37,6 @@ public class NextFlowRunTask extends WorkDirectoryTask
public static final String ACTION_NAME = "NextFlow";
- private static final DbSequence INVOCATION_SEQUENCE = DbSequenceManager.get(ContainerManager.getRoot(), NextFlowRunTask.class.getName());
-
public NextFlowRunTask(Factory factory, PipelineJob job)
{
super(factory, job);
@@ -54,9 +49,9 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
// NextFlow requires a unique job name for every execution. Increment a counter to append as a suffix to
// ensure uniqueness
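+ // The new count is stored in the nextflow.Job table; getNextFlowRunName(true) and getJsonJobInfo(true) read it back below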
- long invocationCount = INVOCATION_SEQUENCE.next();
- INVOCATION_SEQUENCE.sync();
- NextFlowPipelineJob.LOG.info("Starting to execute NextFlow: {}", getJob().getJsonJobInfo(invocationCount));
+ NextFlowManager.get().incrementInvocationCount(getJob());
+
+ NextFlowPipelineJob.LOG.info("Starting to execute NextFlow: {}", getJob().getJsonJobInfo(true));
SecurityManager.TransformSession session = null;
boolean success = false;
@@ -83,10 +78,10 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
File dir = getJob().getLogFile().getParentFile();
getJob().runSubProcess(secretsPB, dir);
- ProcessBuilder executionPB = new ProcessBuilder(getArgs(invocationCount));
+ ProcessBuilder executionPB = new ProcessBuilder(getArgs());
getJob().runSubProcess(executionPB, dir);
log.info("Job Finished");
- NextFlowPipelineJob.LOG.info("Finished executing NextFlow: {}", getJob().getJsonJobInfo(invocationCount));
+ NextFlowPipelineJob.LOG.info("Finished executing NextFlow: {}", getJob().getJsonJobInfo(true));
RecordedAction action = new RecordedAction(ACTION_NAME);
for (Path inputFile : getJob().getInputFilePaths())
@@ -110,7 +105,7 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
}
if (!success)
{
- NextFlowPipelineJob.LOG.info("Failed executing NextFlow: {}", getJob().getJsonJobInfo(invocationCount));
+ NextFlowPipelineJob.LOG.info("Failed executing NextFlow: {}", getJob().getJsonJobInfo(true));
}
}
}
@@ -176,7 +171,7 @@ private boolean hasAwsSection(Path configFile) throws PipelineJobException
}
- private @NotNull List<String> getArgs(long invocationCount) throws PipelineJobException
+ private @NotNull List<String> getArgs() throws PipelineJobException
{
NextFlowConfiguration config = NextFlowManager.get().getConfiguration();
Path configFile = getJob().getConfig();
@@ -201,7 +196,7 @@ private boolean hasAwsSection(Path configFile) throws PipelineJobException
args.add("-c");
args.add(configFile.toAbsolutePath().toString());
args.add("-name");
- args.add(getJob().getNextFlowRunName(invocationCount));
+ args.add(getJob().getNextFlowRunName(true));
return args;
}
diff --git a/panoramapublic/src/org/labkey/panoramapublic/PanoramaPublicFileImporter.java b/panoramapublic/src/org/labkey/panoramapublic/PanoramaPublicFileImporter.java
index 2a65f28d..9048e16c 100644
--- a/panoramapublic/src/org/labkey/panoramapublic/PanoramaPublicFileImporter.java
+++ b/panoramapublic/src/org/labkey/panoramapublic/PanoramaPublicFileImporter.java
@@ -16,6 +16,7 @@
import org.labkey.api.pipeline.PipelineService;
import org.labkey.api.query.BatchValidationException;
import org.labkey.api.security.User;
+import org.labkey.api.targetedms.TargetedMSService;
import org.labkey.api.writer.VirtualFile;
import org.labkey.panoramapublic.pipeline.CopyExperimentPipelineJob;
@@ -25,6 +26,7 @@
import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
+import java.util.Optional;
/**
* This importer does a file move instead of copy to the temp directory and creates a symlink in place of the original
@@ -90,6 +92,7 @@ public void process(@Nullable PipelineJob job, FolderImportContext ctx, VirtualF
PanoramaPublicSymlinkManager.get().moveAndSymLinkDirectory(expJob, ctx.getContainer(), sourceFiles, targetFiles, log);
alignDataFileUrls(expJob.getUser(), ctx.getContainer(), log);
+ updateSkydDataIds(expJob.getUser(), ctx.getContainer(), log);
}
}
@@ -154,6 +157,78 @@ private void alignDataFileUrls(User user, Container targetContainer, Logger log)
}
}
+ /**
+ * Fixes incorrect skydDataId reference in TargetedMSRun. This happens when the relative locations of the sky.zip
+ * and .skyd file are non-standard in the folder being copied.
+ *
+ * When a sky.zip file or its exploded folder is moved post-import, so that the relative locations of the sky.zip and
+ * its corresponding .skyd file are non-standard, two ExpData rows are created for the .skyd file in the Panorama Public
+ * copy pipeline job.
+ * The first ExpData (linked to the ExpRun) is created during XAR import.
+ * The second ExpData (not linked to the ExpRun) is created in the SkylineDocumentParser.parseChromatograms() method.
+ * Normally, while running the copy pipeline job, SkylineDocumentParser.parseChromatograms() does not have to create
+ * a new ExpData, since an ExpData with the expected path already exists.
+ * Having 2 ExpDatas causes:
+ * 1. The skydDataId in TargetedMSRun references an ExpData not linked to the ExpRun. It refers to a file in the
+ * 'export' directory which gets deleted after folder import.
+ * 2. FK violations during cleanup (CopyExperimentFinalTask.cleanupExportDirectory()) prevent deletion of the ExpData
+ * corresponding to the skydDataId.
+ *
+ * This method finds the matching ExpData and updates the skydDataId on the TargetedMSRun in the case where the
+ * current skydDataId is not linked to the ExpRun.
+ */
+ private void updateSkydDataIds(User user, Container targetContainer, Logger log) throws BatchValidationException, ImportException
+ {
+ log.info("Updating skydDataIds in folder: " + targetContainer.getPath());
+
+ boolean errors = false;
+ ExperimentService expService = ExperimentService.get();
+ List<? extends ExpRun> runs = expService.getExpRuns(targetContainer, null, null);
+
+ TargetedMSService tmsService = TargetedMSService.get();
+ for (ExpRun run : runs)
+ {
+ var targetedmsRun = tmsService.getRunByLsid(run.getLSID(), targetContainer);
+ if (targetedmsRun == null) continue;
+
+ var skydDataId = targetedmsRun.getSkydDataId();
+ if (skydDataId == null) continue;
+
+ var skydData = expService.getExpData(skydDataId);
+ if (skydData == null)
+ {
+ log.error("Could not find a row for skydDataId " + skydDataId + " for run " + targetedmsRun.getFileName());
+ errors = true;
+ }
+ else if (skydData.getRun() == null)
+ {
+ // skydData is not associated with an ExpRun. Find an ExpData associated with the ExpRun that matches
+ // the skydName and update the skydDataId on the run.
+ String skydName = skydData.getName();
+ Optional<? extends ExpData> matchingData = run.getAllDataUsedByRun().stream()
+ .filter(data -> Objects.equals(skydName, data.getName()))
+ .findFirst();
+
+ if (matchingData.isPresent())
+ {
+ ExpData data = matchingData.get();
+ log.debug("Updating skydDataId for run " + targetedmsRun.getFileName() + " to " + data.getRowId());
+ tmsService.updateSkydDataId(targetedmsRun, data, user);
+ }
+ else
+ {
+ log.error("Could not find matching skyData for run " + targetedmsRun.getFileName());
+ errors = true;
+ }
+ }
+ }
+
+ if (errors)
+ {
+ throw new ImportException("Could not update skydDataIds");
+ }
+ }
+
public static class Factory extends AbstractFolderImportFactory
{
@Override
diff --git a/panoramapublic/src/org/labkey/panoramapublic/query/ExperimentAnnotationsTableInfo.java b/panoramapublic/src/org/labkey/panoramapublic/query/ExperimentAnnotationsTableInfo.java
index 4c93fbea..622a5181 100644
--- a/panoramapublic/src/org/labkey/panoramapublic/query/ExperimentAnnotationsTableInfo.java
+++ b/panoramapublic/src/org/labkey/panoramapublic/query/ExperimentAnnotationsTableInfo.java
@@ -162,7 +162,10 @@ public void renderGridCellContents(RenderContext ctx, Writer out) throws IOExcep
.at(src, PageFlowUtil.staticResourceUrl("_images/plus.gif"))),
HtmlString.NBSP)
.appendTo(out);
- pageConfig.addHandler(spanId, "click", "viewExperimentDetails(this,'" + container.getPath() + "', '" + id + "','" + detailsPage + "')");
+ pageConfig.addHandler(spanId, "click", "viewExperimentDetails(this,"
+ + PageFlowUtil.jsString(container.getPath())
+ + ", " + id + ", "
+ + PageFlowUtil.jsString(detailsPage) + ")");
}
super.renderGridCellContents(ctx, out);
}
diff --git a/panoramapublic/test/src/org/labkey/test/tests/panoramapublic/PanoramaPublicMoveSkyDocTest.java b/panoramapublic/test/src/org/labkey/test/tests/panoramapublic/PanoramaPublicMoveSkyDocTest.java
index a469f8de..05c5f9dd 100644
--- a/panoramapublic/test/src/org/labkey/test/tests/panoramapublic/PanoramaPublicMoveSkyDocTest.java
+++ b/panoramapublic/test/src/org/labkey/test/tests/panoramapublic/PanoramaPublicMoveSkyDocTest.java
@@ -58,9 +58,46 @@ public void testExperimentCopy()
moveDocument(SKY_FILE_2, targetFolder, 2);
goToProjectFolder(projectName, targetFolder);
- log("Importing " + SKY_FILE_3 + " in folder " + skyDocSourceFolder);
+ log("Importing " + SKY_FILE_3 + " in folder " + targetFolder);
importData(SKY_FILE_3, 3);
+ // Test moving the sky.zip to a subdirectory in the file root, while the .skyd remains in the original location.
+ //
+ // If the sky.zip file and the .skyd file are not in their typical relative locations, the skydDataId in
+ // TargetedMSRun has to be updated after the folder is copied to Panorama Public. Without the update the
+ // copy pipeline job fails.
+ // Example:
+ // -------------------------
+ // BEFORE MOVE:
+ // SmallMolLibA.sky.zip
+ // SmallMolLibA
+ // - SmallMolLibA.sky
+ // - SmallMolLibA.skyd
+ // -------------------------
+ // AFTER MOVE:
+ // - SkylineFiles
+ // - SmallMolLibA.sky.zip (new location after move)
+ // SmallMolLibA
+ // - SmallMolLibA.sky
+ // - SmallMolLibA.skyd
+ // This results in:
+ // Two ExpData rows are created for the .skyd file in the folder copy on Panorama Public:
+ // 1. @files/export/.../Run/SkylineFiles/SmallMolLibA/SmallMolLibA.skyd
+ // 2. @files/SmallMolLibA/SmallMolLibA.skyd
+ // #1 is set as the skydDataId in TargetedMSRun, but it is not linked to the ExpRun (runId is null).
+ // #2 is linked to the ExpRun. This is the ExpData that skydDataId in TargetedMSRun *should* refer to.
+ // This situation causes two problems:
+ // 1. ExpData cleanup in CopyExperimentFinalTask fails due to FK violation - cannot delete ExpData #1 since
+ // skydDataId in TargetedMSRun points to it.
+ // 2. Even if we were not cleaning up ExpData referring to files in the 'export' directory, chromatogram data
+ // would become unavailable since the "export" directory gets deleted after folder import.
+ //
+ // PanoramaPublicFileImporter.updateSkydDataIds() fixes the skydDataId, if required.
+ String subDir = "SkylineFiles";
+ log("Moving " + SKY_FILE_3 + " to sub directory " + subDir + " in the Files browser");
+ // Move the .sky.zip file to a subdirectory
+ moveSkyZipToSubDir(SKY_FILE_3, subDir);
+
log("Creating and submitting an experiment");
String experimentTitle = "Experiment to test moving Skyline documents from other folders";
var expWebPart = createExperimentCompleteMetadata(experimentTitle);
@@ -75,6 +112,15 @@ public void testExperimentCopy()
verifyRunFilePathRoot(SKY_FILE_1, PANORAMA_PUBLIC, panoramaCopyFolder);
verifyRunFilePathRoot(SKY_FILE_2, PANORAMA_PUBLIC, panoramaCopyFolder);
verifyRunFilePathRoot(SKY_FILE_3, PANORAMA_PUBLIC, panoramaCopyFolder);
+
+ // Verify that we can view chromatograms for the Skyline document that was moved to a subdirectory.
+ goToDashboard();
+ clickAndWait(Locator.linkContainingText(SKY_FILE_3));
+ clickAndWait(Locator.linkContainingText("2 replicates"));
+ clickAndWait(Locator.linkContainingText("FU2_2017_0915_RJ_05_1ab_30").index(0));
+ assertTextPresent("Sample File Summary");
+ assertTextPresent("Total Ion Chromatogram");
+ assertTextNotPresent("Unable to load chromatogram");
}
private void moveDocument(String skylineDocName, String targetFolder, int jobCount)
@@ -96,6 +142,18 @@ private void moveDocument(String skylineDocName, String targetFolder, int jobCou
verifyRunFilePathRoot(skylineDocName, getProjectName(), targetFolder);
}
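+ // Moves the given Skyline document's .sky.zip into a subdirectory of the file root via the Files browser,
+ // creating the subdirectory first if it does not already exist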
+ private void moveSkyZipToSubDir(String documentName, String subDir)
+ {
+ portalHelper.goToModule("FileContent");
+ waitForText(documentName);
+ if (!_fileBrowserHelper.fileIsPresent(subDir))
+ {
+ _fileBrowserHelper.createFolder(subDir);
+ }
+ _fileBrowserHelper.moveFile(documentName, subDir);
+ }
+
+
private void verifyRunFilePathRoot(String skylineDocName, String projectName, String targetFolder)
{
// Verify that exp.run filePathRoot is set to the target folder
diff --git a/panoramapublic/webapp/PanoramaPublic/js/dropDownUtil.js b/panoramapublic/webapp/PanoramaPublic/js/dropDownUtil.js
index 08af5147..cb4252d3 100644
--- a/panoramapublic/webapp/PanoramaPublic/js/dropDownUtil.js
+++ b/panoramapublic/webapp/PanoramaPublic/js/dropDownUtil.js
@@ -66,12 +66,13 @@ viewExperimentDetails = function (obj, experimentContainer, id, detailsPageURL)
var results;
if(object.rows[rowNum][type] != null)
{
- if(object.rows[rowNum][type].length > 500)
+ let description = object.rows[rowNum][type];
+ if(description.length > 500)
{
- results = object.rows[rowNum][type].substring(0,500)+"...more.";
+ results = LABKEY.Utils.encodeHtml(description.substring(0,500)) +"...more.";
}
else {
- results =object.rows[rowNum][type];
+ results = LABKEY.Utils.encodeHtml(description);
}
}
else {results = null;}