diff --git a/elispotassay/src/org/labkey/elispot/pipeline/BackgroundSubtractionJob.java b/elispotassay/src/org/labkey/elispot/pipeline/BackgroundSubtractionJob.java index 9416b3077..b0845c7f4 100644 --- a/elispotassay/src/org/labkey/elispot/pipeline/BackgroundSubtractionJob.java +++ b/elispotassay/src/org/labkey/elispot/pipeline/BackgroundSubtractionJob.java @@ -48,7 +48,6 @@ import org.labkey.elispot.plate.PlateInfo; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; @@ -72,7 +71,7 @@ public BackgroundSubtractionJob(String provider, ViewBackgroundInfo info, PipeRo { super(provider, info, root); - File logFile = FileUtil.createTempFile("backgroundSubtractionJob", ".log", root.getLogDirectory()); + FileLike logFile = FileUtil.createTempFile("backgroundSubtractionJob", ".log", root.getLogDirectory(true)); setLogFile(logFile); _runs = runs; diff --git a/flow/enginesrc/org/labkey/flow/persist/AttributeSet.java b/flow/enginesrc/org/labkey/flow/persist/AttributeSet.java index a475c52f9..d8bb7c8c2 100644 --- a/flow/enginesrc/org/labkey/flow/persist/AttributeSet.java +++ b/flow/enginesrc/org/labkey/flow/persist/AttributeSet.java @@ -35,9 +35,8 @@ import org.labkey.flow.flowdata.xml.Graph; import org.labkey.flow.flowdata.xml.Keyword; import org.labkey.flow.flowdata.xml.Statistic; +import org.labkey.vfs.FileLike; -import java.io.File; -import java.io.FileOutputStream; import java.io.OutputStream; import java.io.Serializable; import java.net.URI; @@ -95,8 +94,7 @@ public AttributeSet(FlowData data, URI uri) { Set aliases = new LinkedHashSet<>(); _keywordAliases.put(name, aliases); - for (String alias : keyword.getAliases().getAliasArray()) - aliases.add(alias); + Collections.addAll(aliases, keyword.getAliases().getAliasArray()); } } } @@ -381,12 +379,13 @@ public Collection getGraphAliases(GraphSpec spec) return Collections.unmodifiableCollection(aliases); } - public void save(File file, 
DataBaseType dbt) throws Exception + public void save(FileLike file, DataBaseType dbt) throws Exception { dbt.setDataFileUrl(file.toURI().toString()); - OutputStream os = new FileOutputStream(file); - save(os); - os.close(); + try (OutputStream os = file.openOutputStream()) + { + save(os); + } } public void save(OutputStream os) throws Exception diff --git a/flow/src/org/labkey/flow/FlowSettings.java b/flow/src/org/labkey/flow/FlowSettings.java index cf71979bf..a59287892 100644 --- a/flow/src/org/labkey/flow/FlowSettings.java +++ b/flow/src/org/labkey/flow/FlowSettings.java @@ -66,15 +66,15 @@ static private FileLike getTempAnalysisDirectory() * * @return File object representing the Flow analysis working directory. */ - static public File getWorkingDirectory() + static public FileLike getWorkingDirectory() { //Get admin provided setting if it exists String path = getWorkingDirectoryPath(); if (path != null) - return new File(path); + return FileSystemLike.wrapFile(new File(path)); // Otherwise default to the - return FileSystemLike.toFile(getTempAnalysisDirectory()); + return getTempAnalysisDirectory(); } static public String getWorkingDirectoryPath() diff --git a/flow/src/org/labkey/flow/controllers/executescript/AnalysisScriptController.java b/flow/src/org/labkey/flow/controllers/executescript/AnalysisScriptController.java index 143b6a229..a9b16269e 100644 --- a/flow/src/org/labkey/flow/controllers/executescript/AnalysisScriptController.java +++ b/flow/src/org/labkey/flow/controllers/executescript/AnalysisScriptController.java @@ -394,12 +394,12 @@ protected ModelAndView uploadRuns(ImportRunsForm form, BindException errors) thr } validatePipeline(); - List files; + List files; PipeRoot pr = PipelineService.get().findPipelineRoot(getContainer()); if (form.isCurrent()) - files = Collections.singletonList(pr.resolvePath(form.getPath())); + files = Collections.singletonList(pr.resolvePathToFileLike(form.getPath())); else - files = 
form.getValidatedFiles(form.getContainer()).stream().map(FileLike::toNioPathForRead).map(Path::toFile).toList(); + files = form.getValidatedFiles(form.getContainer()); // validate target study Container targetStudy = getTargetStudy(form.getTargetStudy(), errors); @@ -736,7 +736,7 @@ private void getWorkspace(ImportAnalysisForm form, Errors errors) // - absolute (run path) // - a file-browser path (relative to pipe root but starts with '/') // - a file-browser path (relative to pipe root and doesn't start with '/') - private File getDir(String path, Errors errors) + private FileLike getDir(String path, Errors errors) { PipeRoot root = getPipeRoot(); File dir = new File(path); @@ -758,11 +758,11 @@ private File getDir(String path, Errors errors) errors.reject(ERROR_MSG, "The path specified must be a directory containing FCS files."); return null; } - return dir; + return FileSystemLike.wrapFile(dir); } // Get the directory to use as the file path root of the flow analysis run. - private File getRunPathRoot(List keywordDirs, SampleIdMap resolvedFCSFiles) + private FileLike getRunPathRoot(List keywordDirs, SampleIdMap resolvedFCSFiles) { if (keywordDirs != null && !keywordDirs.isEmpty()) { @@ -782,7 +782,7 @@ private File getRunPathRoot(List keywordDirs, SampleIdMap res FlowRun flowRun = fcsFile.getRun(); ExpRun expRun = flowRun != null ? flowRun.getExperimentRun() : null; if (expRun != null) - return expRun.getFilePathRoot(); + return expRun.getFilePathFileLike(); } } } @@ -792,7 +792,7 @@ private File getRunPathRoot(List keywordDirs, SampleIdMap res // Get the path to either the previously imported keyword run or // to the selected pipeline browser directory under the pipeline root. 
- private List getKeywordDirs(ImportAnalysisForm form, Errors errors) + private List getKeywordDirs(ImportAnalysisForm form, Errors errors) { String path = null; if (form.getKeywordDir() != null && form.getKeywordDir().length > 0) @@ -803,7 +803,7 @@ private List getKeywordDirs(ImportAnalysisForm form, Errors errors) if (path != null) { - File keywordDir = getDir(path, errors); + FileLike keywordDir = getDir(path, errors); if (errors.hasErrors()) return null; @@ -1115,7 +1115,7 @@ else if (fcsFilesOption == SelectFCSFileOption.Previous) else if (fcsFilesOption == SelectFCSFileOption.Browse) { WorkspaceData workspaceData = form.getWorkspace(); - List keywordDirs = getKeywordDirs(form, errors); + List keywordDirs = getKeywordDirs(form, errors); if (keywordDirs == null || keywordDirs.isEmpty()) errors.reject(ERROR_MSG, "No directory selected"); @@ -1125,7 +1125,7 @@ else if (fcsFilesOption == SelectFCSFileOption.Browse) if (errors.hasErrors()) return; - File keywordDir = keywordDirs.get(0); + FileLike keywordDir = keywordDirs.get(0); // Translate selected keyword directory into a existing keyword run if possible. 
FlowRun existingKeywordRun = null; @@ -1160,7 +1160,7 @@ else if (fcsFilesOption == SelectFCSFileOption.Browse) Map rows = new HashMap<>(); for (ISampleInfo sampleInfo : sampleInfos) { - File sampleFile = FileUtil.appendName(keywordDir, sampleInfo.getLabel()); + FileLike sampleFile = keywordDir.resolveChild(sampleInfo.getLabel()); boolean exists = sampleFile.exists(); if (exists) found = true; @@ -1316,7 +1316,7 @@ else if (workspace instanceof FlowJoWorkspace) private void stepChooseAnalysis(ImportAnalysisForm form, BindException errors) { - List keywordDirs = getKeywordDirs(form, errors); + List keywordDirs = getKeywordDirs(form, errors); if (errors.hasErrors()) return; @@ -1358,8 +1358,8 @@ private void stepChooseAnalysis(ImportAnalysisForm form, BindException errors) if (keywordDirs != null) { - for (File keywordDir : keywordDirs) - if (experiment.hasRun(keywordDir, null)) + for (FileLike keywordDir : keywordDirs) + if (experiment.hasRun(keywordDir.toNioPathForRead().toFile(), null)) { errors.reject(ERROR_MSG, "The '" + experiment.getName() + "' analysis folder already contains the FCS files from '" + keywordDir + "'."); return; @@ -1388,7 +1388,7 @@ private void stepChooseAnalysis(ImportAnalysisForm form, BindException errors) private void stepConfirm(ImportAnalysisForm form, BindException errors) throws Exception { - List keywordDirs = getKeywordDirs(form, errors); + List keywordDirs = getKeywordDirs(form, errors); if (errors.hasErrors()) return; @@ -1421,7 +1421,7 @@ private void stepConfirm(ImportAnalysisForm form, BindException errors) throws E throw new IllegalArgumentException("Wrong container"); WorkspaceData workspaceData = form.getWorkspace(); - File pipelineFile = null; + FileLike pipelineFile = null; ViewBackgroundInfo info = getViewBackgroundInfo(); if (getPipeRoot() == null) { @@ -1431,11 +1431,11 @@ private void stepConfirm(ImportAnalysisForm form, BindException errors) throws E else { if (workspaceData.getPath() != null) - pipelineFile = 
getPipeRoot().resolvePath(workspaceData.getPath()); + pipelineFile = getPipeRoot().resolvePathToFileLike(workspaceData.getPath()); } // Choose a run path root for the imported analysis based upon the input FCS files. - File runFilePathRoot = getRunPathRoot(keywordDirs, selectedFCSFiles); + FileLike runFilePathRoot = getRunPathRoot(keywordDirs, selectedFCSFiles); AnalysisEngine analysisEngine = getAnalysisEngine(form); if (errors.hasErrors()) @@ -1465,9 +1465,9 @@ private void stepConfirm(ImportAnalysisForm form, BindException errors) throws E else if (AnalysisEngine.Archive == analysisEngine) { assert (workspaceData.getWorkspaceObject() instanceof ExternalAnalysis); - File originalFile = pipelineFile; + FileLike originalFile = pipelineFile; if (workspaceData.getOriginalPath() != null) - originalFile = root.resolvePath(workspaceData.getOriginalPath()); + originalFile = root.resolvePathToFileLike(workspaceData.getOriginalPath()); job = new ImportResultsJob(info, getPipeRoot(), experiment, AnalysisEngine.Archive, pipelineFile, originalFile, runFilePathRoot, diff --git a/flow/src/org/labkey/flow/controllers/run/RunController.java b/flow/src/org/labkey/flow/controllers/run/RunController.java index 47f90c957..eeaaab8f4 100644 --- a/flow/src/org/labkey/flow/controllers/run/RunController.java +++ b/flow/src/org/labkey/flow/controllers/run/RunController.java @@ -87,6 +87,7 @@ import org.labkey.flow.view.ExportAnalysisForm; import org.labkey.flow.view.ExportAnalysisManifest; import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import org.springframework.validation.BindException; import org.springframework.validation.Errors; import org.springframework.web.servlet.ModelAndView; @@ -96,7 +97,6 @@ import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -605,16 +605,12 @@ else if (_wells != null && 
!_wells.isEmpty()) return _success = true; } - private void writeManifest(String manifestJson, File dir) throws IOException + private void writeManifest(String manifestJson, FileLike dir) throws IOException { if (manifestJson == null || manifestJson.isEmpty()) return; - - File file = FileUtil.appendName(dir, MANIFEST_FILENAME); - FileOutputStream statisticsFile = new FileOutputStream(file); - - try (PrintWriter pw = PrintWriters.getPrintWriter(statisticsFile)) + try (PrintWriter pw = PrintWriters.getPrintWriter(dir.resolveChild(MANIFEST_FILENAME).openOutputStream())) { pw.write(manifestJson); } @@ -690,7 +686,7 @@ URLHelper onExportComplete(ExportAnalysisForm form, VirtualFile vf, SampleIdMap< case Script: // after exporting the files, execute script as a pipeline job - File location = new File(vf.getLocation()); + FileLike location = FileSystemLike.wrapFile(new File(vf.getLocation())); PipeRoot root = PipelineService.get().findPipelineRoot(getContainer()); ViewBackgroundInfo vbi = new ViewBackgroundInfo(getContainer(), getUser(), null); @@ -742,7 +738,7 @@ private static class ExportToScriptJob extends PipelineJob private final String _exportToScriptCommandLine; private final String _exportToScriptFormat; private final String _label; - private final File _location; + private final FileLike _location; private final Integer _timeout; private final boolean _deleteOnComplete; @@ -753,7 +749,7 @@ protected ExportToScriptJob( @JsonProperty("_exportToScriptCommandLine") String exportToScriptCommandLine, @JsonProperty("_exportToScriptFormat") String exportToScriptFormat, @JsonProperty("_label") String label, - @JsonProperty("_location") File location, + @JsonProperty("_location") FileLike location, @JsonProperty("_timeout") Integer timeout, @JsonProperty("_deleteOnComplete") boolean deleteOnComplete ) @@ -768,7 +764,7 @@ protected ExportToScriptJob( _deleteOnComplete = deleteOnComplete; } - public ExportToScriptJob(String guid, String exportToScriptPath, String 
exportToScriptCommandLine, String exportToScriptFormat, String label, File location, Integer timeout, boolean deleteOnComplete, ViewBackgroundInfo info, @NotNull PipeRoot root) + public ExportToScriptJob(String guid, String exportToScriptPath, String exportToScriptCommandLine, String exportToScriptFormat, String label, FileLike location, Integer timeout, boolean deleteOnComplete, ViewBackgroundInfo info, @NotNull PipeRoot root) { super(null, info, root); _guid = guid; @@ -781,7 +777,7 @@ public ExportToScriptJob(String guid, String exportToScriptPath, String exportTo _deleteOnComplete = deleteOnComplete; // setup the log file - FileLike logFile = root.getLogDirectoryFileLike(true).resolveChild(FileUtil.makeFileNameWithTimestamp("export-to-script", "log")); + FileLike logFile = root.getLogDirectory(true).resolveChild(FileUtil.makeFileNameWithTimestamp("export-to-script", "log")); setLogFile(logFile); } @@ -799,10 +795,7 @@ public URLHelper getStatusHref() urlHelper = new URLHelper(url); } } - catch (Exception e) - { - urlHelper = null; - } + catch (Exception ignored) {} return urlHelper; } diff --git a/flow/src/org/labkey/flow/data/FlowCompensationMatrix.java b/flow/src/org/labkey/flow/data/FlowCompensationMatrix.java index 07cb6306a..9502e07be 100644 --- a/flow/src/org/labkey/flow/data/FlowCompensationMatrix.java +++ b/flow/src/org/labkey/flow/data/FlowCompensationMatrix.java @@ -25,7 +25,6 @@ import org.labkey.api.query.FieldKey; import org.labkey.api.query.QueryRowReference; import org.labkey.api.security.User; -import org.labkey.api.util.FileUtil; import org.labkey.api.view.ActionURL; import org.labkey.flow.FlowSettings; import org.labkey.flow.analysis.model.CompensationMatrix; @@ -88,7 +87,7 @@ static public FlowCompensationMatrix create(User user, Container container, Stri { data = svc.createData(container, FlowDataType.CompensationMatrix, name); } - data.setDataFileURI(FileUtil.appendName(FlowSettings.getWorkingDirectory(), "compensation." 
+ FlowDataHandler.EXT_DATA).toURI()); + data.setDataFileURI(FlowSettings.getWorkingDirectory().resolveChild("compensation." + FlowDataHandler.EXT_DATA).toURI()); data.save(user); AttributeSetHelper.doSave(attrs, user, data, log); flowComp = (FlowCompensationMatrix) FlowDataObject.fromData(data); diff --git a/flow/src/org/labkey/flow/data/FlowExperiment.java b/flow/src/org/labkey/flow/data/FlowExperiment.java index 3caea00c1..fd95f4ab4 100644 --- a/flow/src/org/labkey/flow/data/FlowExperiment.java +++ b/flow/src/org/labkey/flow/data/FlowExperiment.java @@ -16,8 +16,6 @@ package org.labkey.flow.data; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; import org.jetbrains.annotations.Nullable; import org.labkey.api.data.Container; import org.labkey.api.exp.api.ExpExperiment; @@ -33,21 +31,21 @@ import org.labkey.flow.query.FlowTableType; import jakarta.servlet.http.HttpServletRequest; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.Date; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; /** * Also known as "Analysis Folder" in flow terms or "Run Group" in exp terms. 
*/ public class FlowExperiment extends FlowObject { - static private final Logger _log = LogManager.getLogger(FlowExperiment.class); static public String FlowExperimentRunExperimentName = "Flow Experiment Runs"; static public String FlowWorkspaceExperimentName = "Flow Workspace"; static public String DEFAULT_ANALYSIS_NAME = "Analysis"; @@ -92,7 +90,7 @@ static public FlowExperiment[] getExperiments(Container container) static public FlowExperiment[] getAnalyses(Container container) { - List ret = new ArrayList(); + List ret = new ArrayList<>(); for (FlowExperiment experiment : getExperiments(container)) { if (experiment.isAnalysis()) @@ -103,31 +101,6 @@ static public FlowExperiment[] getAnalyses(Container container) return ret.toArray(new FlowExperiment[0]); } - /** - * Generate an unused FlowExperiment name using "Analysis" as the starting name. - */ - static public String generateUnusedName(Container container) - { - return generateUnusedName(container, DEFAULT_ANALYSIS_NAME); - } - - static public String generateUnusedName(Container container, String baseName) - { - Set namesInUse = new HashSet<>(); - for (FlowExperiment analysis : FlowExperiment.getAnalyses(container)) - namesInUse.add(analysis.getName().toLowerCase()); - - String newAnalysisName = baseName; - int nameIndex = 0; - while (namesInUse.contains(newAnalysisName.toLowerCase())) - { - nameIndex++; - newAnalysisName = baseName + nameIndex; - } - - return newAnalysisName; - } - static public FlowExperiment getForName(User user, Container container, String name) { String lsid = ExperimentService.get().generateLSID(container, ExpExperiment.class, name); @@ -148,7 +121,7 @@ static public FlowExperiment createForName(User user, Container container, Strin static public FlowExperiment[] getAnalysesAndWorkspace(Container container) { - List ret = new ArrayList(); + List ret = new ArrayList<>(); for (FlowExperiment experiment : getExperiments(container)) { if (experiment.isAnalysis() || 
experiment.isWorkspace()) @@ -159,14 +132,6 @@ static public FlowExperiment[] getAnalysesAndWorkspace(Container container) return ret.toArray(new FlowExperiment[0]); } - static public FlowExperiment getDefaultAnalysis(Container container) - { - FlowExperiment[] experiments = getAnalyses(container); - if (experiments.length == 0) - return null; - return experiments[0]; - } - static public String getExperimentRunExperimentLSID(Container container) { return FlowObject.generateLSID(container, "Experiment", FlowExperimentRunExperimentName); @@ -182,11 +147,6 @@ static public String getWorkspaceLSID(Container container) return FlowObject.generateLSID(container, "Experiment", FlowWorkspaceExperimentName); } - static public String getWorkspaceRunExperimentName(Container container) - { - return FlowWorkspaceExperimentName; - } - static public FlowExperiment fromURL(ActionURL url, Container actionContainer, User user) { return fromURL(url, null, actionContainer, user); @@ -264,12 +224,12 @@ public boolean hasRun(File filePath, @Nullable FlowProtocolStep step) return false; } - public List findRun(File filePath, FlowProtocolStep step) + public List findRun(FileLike filePath, FlowProtocolStep step) { List ret = new ArrayList<>(); for (FlowRun run : getRuns(step)) { - if (filePath.equals(run.getExperimentRun().getFilePathRoot())) + if (filePath.toNioPathForRead().toFile().equals(run.getExperimentRun().getFilePathRoot())) { ret.add(run); } @@ -355,21 +315,12 @@ static public FlowExperiment getWorkspace(Container container) return FlowExperiment.fromLSID(getWorkspaceLSID(container)); } - static public FlowExperiment ensureWorkspace(User user, Container container) - { - FlowExperiment ret = getWorkspace(container); - if (ret != null) - return ret; - ExpExperiment exp = ExperimentService.get().createExpExperiment(container, FlowWorkspaceExperimentName); - exp.save(user); - return new FlowExperiment(exp); - } - public FlowCompensationMatrix findCompensationMatrix(FlowRun run) { List 
runs = new ArrayList<>(); - runs.addAll(findRun(new File(run.getPath()), FlowProtocolStep.analysis)); - runs.addAll(findRun(new File(run.getPath()), FlowProtocolStep.calculateCompensation)); + FileLike file = FileSystemLike.wrapFile(new File(run.getPath())); + runs.addAll(findRun(file, FlowProtocolStep.analysis)); + runs.addAll(findRun(file, FlowProtocolStep.calculateCompensation)); for (FlowRun runComp : runs) { FlowCompensationMatrix comp = runComp.getCompensationMatrix(); diff --git a/flow/src/org/labkey/flow/data/FlowProtocol.java b/flow/src/org/labkey/flow/data/FlowProtocol.java index da3ff7bb9..e205c87e2 100644 --- a/flow/src/org/labkey/flow/data/FlowProtocol.java +++ b/flow/src/org/labkey/flow/data/FlowProtocol.java @@ -82,6 +82,7 @@ import org.labkey.flow.query.FlowSchema; import org.labkey.flow.query.FlowTableType; import org.labkey.flow.script.KeywordsJob; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.sql.ResultSet; @@ -1003,7 +1004,7 @@ public void testSampleJoin() throws Exception // import some FCS files ViewBackgroundInfo info = new ViewBackgroundInfo(c, user, null); File dir = JunitUtil.getSampleData(null, "flow/flowjoquery/microFCS"); - KeywordsJob job = new KeywordsJob(info, protocol, List.of(dir), null, root); + KeywordsJob job = new KeywordsJob(info, protocol, List.of(FileSystemLike.wrapFile(dir)), null, root); List runs = job.go(); assertNotNull(runs); assertEquals(1, runs.size()); diff --git a/flow/src/org/labkey/flow/data/FlowRun.java b/flow/src/org/labkey/flow/data/FlowRun.java index 3f664267c..4c6ebe929 100644 --- a/flow/src/org/labkey/flow/data/FlowRun.java +++ b/flow/src/org/labkey/flow/data/FlowRun.java @@ -16,8 +16,6 @@ package org.labkey.flow.data; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; import org.jetbrains.annotations.NotNull; import org.labkey.api.attachments.Attachment; import org.labkey.api.attachments.AttachmentService; @@ -46,6 +44,8 @@ import 
org.labkey.flow.query.FlowTableType; import jakarta.servlet.http.HttpServletRequest; +import org.labkey.vfs.FileLike; + import java.io.File; import java.net.URI; import java.sql.ResultSet; @@ -59,19 +59,11 @@ public class FlowRun extends FlowObject { - private static final Logger _log = LogManager.getLogger(FlowRun.class); - public static final Comparator NAME_COMPARATOR = Comparator.comparing(FlowObject::getName); public static final Comparator CREATED_COMPARATOR = Comparator.comparing(o -> o.getExperimentRun().getCreated()); - static public String getRunLSIDPrefix() - { - // See ExperimentServiceImpl.getNamespacePrefix(ExpRunImpl.class) - return "Run"; - } - static public List fromRuns(List runs) { List ret = new ArrayList<>(runs.size()); @@ -143,11 +135,11 @@ public FlowWell[] getWells(boolean realFiles) URI uri = well.getFCSURI(); // XXX: hit the file system every time? if (uri != null && new File(uri.getPath()).canRead()) - wells.add((FlowWell) obj); + wells.add(well); } else { - wells.add((FlowWell) obj); + wells.add(well); } } } @@ -156,26 +148,6 @@ public FlowWell[] getWells(boolean realFiles) return ret; } - - public FlowWell getFirstWell() - { - if (_allDatas != null) - { - for (FlowDataObject obj : _allDatas) - if (obj instanceof FlowWell) - return (FlowWell)obj; - } - - for (ExpData data : getExperimentRun().getOutputDatas(null)) - { - FlowDataObject obj = FlowDataObject.fromData(data); - if (obj instanceof FlowWell) - return (FlowWell)obj; - } - return null; - } - - public FlowFCSFile[] getFCSFiles() { return getDatas(FlowDataType.FCSFile).toArray(new FlowFCSFile[0]); @@ -398,13 +370,13 @@ static public List getRunsForScript(Container container, FlowProtocolSt } @NotNull - static public List getRunsForPath(Container container, FlowProtocolStep step, File runFilePathRoot) + static public List getRunsForPath(Container container, FlowProtocolStep step, FileLike runFilePathRoot) { return getRunsForPath(container, step, runFilePathRoot, NAME_COMPARATOR); } 
@NotNull - static public List getRunsForPath(Container container, FlowProtocolStep step, File runFilePathRoot, Comparator comparator) + static public List getRunsForPath(Container container, FlowProtocolStep step, FileLike runFilePathRoot, Comparator comparator) { List ret = new ArrayList<>(); ExpProtocol childProtocol = null; @@ -419,7 +391,7 @@ static public List getRunsForPath(Container container, FlowProtocolStep } ExperimentService.get().getExpRuns(container, null, childProtocol, run -> - runFilePathRoot == null || (run.getFilePathRoot() != null && runFilePathRoot.equals(run.getFilePathRoot())) + runFilePathRoot == null || (run.getFilePathRoot() != null && runFilePathRoot.toNioPathForRead().toFile().equals(run.getFilePathRoot())) ).forEach( run -> ret.add(new FlowRun(run))); if (comparator != null) diff --git a/flow/src/org/labkey/flow/reports/FilterFlowReport.java b/flow/src/org/labkey/flow/reports/FilterFlowReport.java index 7f7cff4fb..e1cc14410 100644 --- a/flow/src/org/labkey/flow/reports/FilterFlowReport.java +++ b/flow/src/org/labkey/flow/reports/FilterFlowReport.java @@ -19,7 +19,6 @@ import com.fasterxml.jackson.annotation.JsonManagedReference; import org.apache.commons.beanutils.ConversionException; import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; import org.labkey.api.data.CachedResultSet; import org.labkey.api.data.CachedResultSets; import org.labkey.api.data.ColumnInfo; @@ -54,11 +53,11 @@ import org.labkey.flow.persist.FlowManager; import org.labkey.flow.query.FlowSchema; import org.labkey.flow.query.FlowTableType; +import org.labkey.vfs.FileLike; import org.springframework.beans.PropertyValue; import org.springframework.beans.PropertyValues; import javax.script.ScriptEngine; -import java.io.File; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; @@ -578,7 +577,7 @@ public Results generateResults(ViewContext context, boolean allowAsyncQuery) thr } @Override - protected 
String getScriptProlog(ScriptEngine engine, ViewContext context, File inputFile, Map inputParameters, boolean isRStudio) + protected String getScriptProlog(ScriptEngine engine, ViewContext context, FileLike inputFile, Map inputParameters, boolean isRStudio) { String labkeyProlog = super.getScriptProlog(engine, context, inputFile, inputParameters, isRStudio); @@ -603,14 +602,6 @@ protected String getScriptProlog(ScriptEngine engine, ViewContext context, File _report.addScriptProlog(context, reportProlog); return reportProlog.toString(); } - - @Override - public File getReportDir(@NotNull String executingContainerId) - { - // Issue 12625: Create unique directory for the background report job - boolean isPipeline = _report.saveToDomain(); - return super.getReportDir(executingContainerId, isPipeline); - } } protected boolean filterListEqual(List otherFilters) diff --git a/flow/src/org/labkey/flow/reports/FlowReportJob.java b/flow/src/org/labkey/flow/reports/FlowReportJob.java index 0a3ac0803..a6d782c60 100644 --- a/flow/src/org/labkey/flow/reports/FlowReportJob.java +++ b/flow/src/org/labkey/flow/reports/FlowReportJob.java @@ -41,8 +41,8 @@ import org.labkey.api.view.ViewContext; import org.labkey.flow.query.FlowTableType; import org.labkey.flow.script.FlowPipelineProvider; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; @@ -90,9 +90,9 @@ public Task createPipelineTask(PipelineJob job, Report report, Map outputSubst for (Tuple3 tuple : tuples) { - saveTsvOutput(tuple.first, tuple.second, tuple.third); + saveTsvOutput(tuple.first, tuple.second); if (getErrors() > 0) return; } @@ -184,17 +184,17 @@ private Domain ensureDomain(FlowTableType tableType) return domain; } - private void saveTsvOutput(TsvOutput tsv, Domain domain, FlowTableType tableType) throws Exception + private void saveTsvOutput(TsvOutput tsv, Domain domain) throws Exception { info("Importing tsv file '" + tsv + 
"' into domain " + domain.getName()); - for (File file : tsv.getFiles()) + for (FileLike file : tsv.getFiles()) { TabLoader loader = tsv.createTabLoader(file); mapTsvColumns(domain, loader); if (getErrors() > 0) return; - save(domain, loader, tableType); + save(domain, loader); info("Imported tsv file '" + tsv + "' into domain " + domain.getName()); } } @@ -236,7 +236,7 @@ private void deleteSavedResults() _report.deleteSavedResults(getContainer()); } - private void save(Domain domain, TabLoader loader, FlowTableType tableType) throws BatchValidationException, SQLException, IOException + private void save(Domain domain, TabLoader loader) throws BatchValidationException, SQLException, IOException { OntologyManager.ImportHelper helper = new OntologyManager.ImportHelper() { diff --git a/flow/src/org/labkey/flow/script/AbstractExternalAnalysisJob.java b/flow/src/org/labkey/flow/script/AbstractExternalAnalysisJob.java index 0c1bd8b8e..0315fda21 100644 --- a/flow/src/org/labkey/flow/script/AbstractExternalAnalysisJob.java +++ b/flow/src/org/labkey/flow/script/AbstractExternalAnalysisJob.java @@ -36,7 +36,6 @@ import org.labkey.api.pipeline.PipelineService; import org.labkey.api.query.FieldKey; import org.labkey.api.security.User; -import org.labkey.api.util.FileUtil; import org.labkey.api.util.Pair; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; @@ -67,8 +66,8 @@ import org.labkey.flow.persist.ObjectType; import org.labkey.flow.util.KeywordUtil; import org.labkey.flow.util.SampleUtil; +import org.labkey.vfs.FileLike; -import java.io.File; import java.net.URI; import java.sql.SQLException; import java.util.ArrayList; @@ -89,10 +88,10 @@ public abstract class AbstractExternalAnalysisJob extends FlowExperimentJob { private final AnalysisEngine _analysisEngine; private final FlowExperiment _experiment; - private final File _originalImportedFile; - private final File _runFilePathRoot; + private final FileLike _originalImportedFile; + 
private final FileLike _runFilePathRoot; // Directories of FCS files to be imported. - private final List _keywordDirs; + private final List _keywordDirs; // Map workspace sample ID -> FlowFCSFile (or FlowFCSFile.UNMAPPED if we aren't resolving previously imported FCS files) private SampleIdMap _selectedFCSFiles; private List _newFlowWells; @@ -107,9 +106,9 @@ public abstract class AbstractExternalAnalysisJob extends FlowExperimentJob protected AbstractExternalAnalysisJob( @JsonProperty("_analysisEngine") AnalysisEngine analysisEngine, @JsonProperty("_experiment") FlowExperiment experiment, - @JsonProperty("_originalImportedFile") File originalImportedFile, - @JsonProperty("_runFilePathRoot") File runFilePathRoot, - @JsonProperty("_keywordDirs") List keywordDirs, + @JsonProperty("_originalImportedFile") FileLike originalImportedFile, + @JsonProperty("_runFilePathRoot") FileLike runFilePathRoot, + @JsonProperty("_keywordDirs") List keywordDirs, @JsonProperty("_targetStudy") Container targetStudy, @JsonProperty("_failOnError") boolean failOnError) { @@ -127,9 +126,9 @@ protected AbstractExternalAnalysisJob( PipeRoot root, FlowExperiment experiment, AnalysisEngine analysisEngine, - File originalImportedFile, - File runFilePathRoot, - List keywordDirs, + FileLike originalImportedFile, + FileLike runFilePathRoot, + List keywordDirs, SampleIdMap selectedFCSFiles, //List importGroupNames, Container targetStudy, @@ -173,17 +172,17 @@ public ActionURL urlData() // return _importGroupNames; // } - public File getRunFilePathRoot() + public FileLike getRunFilePathRoot() { return _runFilePathRoot; } - public List getKeywordDirectories() + public List getKeywordDirectories() { return _keywordDirs; } - protected File getOriginalImportedFile() + protected FileLike getOriginalImportedFile() { return _originalImportedFile; } @@ -364,15 +363,15 @@ protected boolean matchesFilter(TableInfo fcsFilesTable, SimpleFilter filter, St protected abstract FlowRun createExperimentRun() throws 
Exception; protected abstract ExpData createExternalAnalysisData(ExperimentService svc, - ExpRun externalAnalysisRun, - User user, Container container, - String analysisName, - File externalAnalysisFile, - File originalImportedFile); + ExpRun externalAnalysisRun, + User user, Container container, + String analysisName, + FileLike externalAnalysisFile, + FileLike originalImportedFile); protected FlowRun saveAnalysis(User user, Container container, FlowExperiment experiment, - String analysisName, File externalAnalysisFile, File originalImportedFile, - File runFilePathRoot, + String analysisName, FileLike externalAnalysisFile, FileLike originalImportedFile, + FileLike runFilePathRoot, SampleIdMap selectedFCSFiles, SampleIdMap keywordsMap, SampleIdMap sampleCompMatrixMap, @@ -384,7 +383,7 @@ protected FlowRun saveAnalysis(User user, Container container, FlowExperiment ex MultiValuedMap sampleIdToNameMap) throws Exception { // Fake file URI set on the FCSFile/FCSAnalsyis ExpData to ensure it's recognized by the FlowDataHandler. 
- URI dataFileURI = FileUtil.appendName(externalAnalysisFile.getParentFile(), "attributes.flowdata.xml").toURI(); + URI dataFileURI = externalAnalysisFile.getParent().resolveChild("attributes.flowdata.xml").toURI(); // Prepare comp matrices for saving Map compMatrixMap = new HashMap<>(); diff --git a/flow/src/org/labkey/flow/script/AnalysisHandler.java b/flow/src/org/labkey/flow/script/AnalysisHandler.java index 55af51034..bc0425989 100644 --- a/flow/src/org/labkey/flow/script/AnalysisHandler.java +++ b/flow/src/org/labkey/flow/script/AnalysisHandler.java @@ -37,8 +37,9 @@ import org.labkey.flow.persist.FlowDataHandler; import org.labkey.flow.persist.InputRole; import org.labkey.flow.persist.ObjectType; +import org.labkey.vfs.FileLike; -import java.io.File; +import java.io.OutputStream; import java.net.URI; import java.sql.SQLException; import java.util.List; @@ -73,7 +74,7 @@ synchronized public DataBaseType addWell(ExperimentRunType runElement, FlowFCSFi } @Override - public void processRun(FlowRun run, ExperimentRunType runElement, File workingDirectory) throws Exception + public void processRun(FlowRun run, ExperimentRunType runElement, FileLike workingDirectory) throws Exception { FlowCompensationMatrix flowComp; @@ -133,9 +134,12 @@ public void processRun(FlowRun run, ExperimentRunType runElement, File workingDi FlowScript script = wells[iWell].getScript(); if (script.getScriptId() != _job._runAnalysisScript.getScriptId()) { - File file = _job.decideFileName(workingDirectory, URIUtil.getFilename(srcWell.getFCSURI()), FlowDataHandler.EXT_SCRIPT); + FileLike file = _job.decideFileName(workingDirectory, URIUtil.getFilename(srcWell.getFCSURI()), FlowDataHandler.EXT_SCRIPT); ScriptDocument doc = script.getAnalysisScriptDocument(); - doc.save(file); + try (OutputStream out = file.openOutputStream()) + { + doc.save(out); + } ProtocolApplicationBaseType app = addProtocolApplication(runElement, null); scriptLSID = 
ExperimentService.get().generateGuidLSID(getContainer(), FlowDataType.Script); @@ -180,7 +184,7 @@ synchronized int getNextWellIndex() private class AnalyzeTask implements Runnable { - File _workingDirectory; + FileLike _workingDirectory; FlowRun _run; FlowWell _well; int _wellCount; @@ -190,7 +194,7 @@ private class AnalyzeTask implements Runnable Analysis _groupAnalysis; String _scriptLSID; - AnalyzeTask(File workingDirectory, FlowRun run, ExperimentRunType runElement, FlowWell well, int wellCount, String scriptLSID, Analysis groupAnalysis, FlowCompensationMatrix flowComp, CompensationMatrix comp) + AnalyzeTask(FileLike workingDirectory, FlowRun run, ExperimentRunType runElement, FlowWell well, int wellCount, String scriptLSID, Analysis groupAnalysis, FlowCompensationMatrix flowComp, CompensationMatrix comp) { _workingDirectory = workingDirectory; _run = run; diff --git a/flow/src/org/labkey/flow/script/AnalyzeJob.java b/flow/src/org/labkey/flow/script/AnalyzeJob.java index 574c4bf6c..fbc05fe54 100644 --- a/flow/src/org/labkey/flow/script/AnalyzeJob.java +++ b/flow/src/org/labkey/flow/script/AnalyzeJob.java @@ -26,6 +26,8 @@ import org.labkey.flow.data.FlowProtocolStep; import org.labkey.flow.data.FlowRun; import org.labkey.flow.data.FlowScript; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.util.List; @@ -59,15 +61,16 @@ protected String getRunName(String name) public void processRun(FlowRun run) throws Exception { + FileLike file = FileSystemLike.wrapFile(new File(run.getPath())); if (_step == FlowProtocolStep.calculateCompensation) { - if (!checkProcessPath(new File(run.getPath()), FlowProtocolStep.calculateCompensation)) + if (!checkProcessPath(file, FlowProtocolStep.calculateCompensation)) return; executeHandler(run, getCompensationCalculationHandler()); } else { - if (!checkProcessPath(new File(run.getPath()), FlowProtocolStep.analysis)) + if (!checkProcessPath(file, FlowProtocolStep.analysis)) 
return; ensureCompensationMatrix(run); executeHandler(run, getAnalysisHandler()); diff --git a/flow/src/org/labkey/flow/script/BaseHandler.java b/flow/src/org/labkey/flow/script/BaseHandler.java index 96b551edd..6b8fab650 100644 --- a/flow/src/org/labkey/flow/script/BaseHandler.java +++ b/flow/src/org/labkey/flow/script/BaseHandler.java @@ -23,8 +23,8 @@ import org.labkey.flow.data.*; import org.labkey.flow.persist.AttributeSet; import org.labkey.flow.persist.InputRole; +import org.labkey.vfs.FileLike; -import java.io.File; import java.util.List; abstract public class BaseHandler @@ -110,7 +110,7 @@ else if (res instanceof FCSAnalyzer.GraphResult) return true; } - abstract public void processRun(FlowRun srcRun, ExperimentRunType runElement, File workingDirectory) throws Exception; + abstract public void processRun(FlowRun srcRun, ExperimentRunType runElement, FileLike workingDirectory) throws Exception; protected void addDataLSID(InputOutputRefsType refs, String lsid, InputRole role) { diff --git a/flow/src/org/labkey/flow/script/CompensationCalculationHandler.java b/flow/src/org/labkey/flow/script/CompensationCalculationHandler.java index 6d49dd6b6..65480bf3d 100644 --- a/flow/src/org/labkey/flow/script/CompensationCalculationHandler.java +++ b/flow/src/org/labkey/flow/script/CompensationCalculationHandler.java @@ -29,9 +29,9 @@ import org.fhcrc.cpas.exp.xml.ExperimentRunType; import org.fhcrc.cpas.exp.xml.ProtocolApplicationBaseType; import org.fhcrc.cpas.exp.xml.DataBaseType; +import org.labkey.vfs.FileLike; import java.util.*; -import java.io.File; public class CompensationCalculationHandler extends BaseHandler { @@ -46,7 +46,7 @@ public CompensationCalculationHandler(ScriptJob job, SettingsDef settings, Compe } @Override - public void processRun(FlowRun run, ExperimentRunType runElement, File outputDirectory) throws Exception + public void processRun(FlowRun run, ExperimentRunType runElement, FileLike outputDirectory) throws Exception { 
_job.addStatus("Calculating compensation matrix for " + run.getName()); List uris = FlowAnalyzer.getFCSRefs(run); diff --git a/flow/src/org/labkey/flow/script/FlowAnalyzer.java b/flow/src/org/labkey/flow/script/FlowAnalyzer.java index 917da9aae..0767979be 100644 --- a/flow/src/org/labkey/flow/script/FlowAnalyzer.java +++ b/flow/src/org/labkey/flow/script/FlowAnalyzer.java @@ -22,8 +22,8 @@ import org.labkey.flow.analysis.model.*; import org.labkey.flow.analysis.web.*; import org.labkey.flow.data.*; +import org.labkey.vfs.FileLike; -import java.io.File; import java.net.URI; import java.util.*; @@ -113,7 +113,7 @@ static public CompensationMatrix getCompensationMatrix(FlowRun run) return comp.getCompensationMatrix(); } - synchronized static public File getAnalysisDirectory() + synchronized static public FileLike getAnalysisDirectory() { return FlowSettings.getWorkingDirectory(); } diff --git a/flow/src/org/labkey/flow/script/FlowExperimentJob.java b/flow/src/org/labkey/flow/script/FlowExperimentJob.java index 8396bae55..27acf3efd 100644 --- a/flow/src/org/labkey/flow/script/FlowExperimentJob.java +++ b/flow/src/org/labkey/flow/script/FlowExperimentJob.java @@ -22,6 +22,7 @@ import org.labkey.api.util.FileUtil; import org.labkey.api.util.GUID; import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.Path; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.flow.FlowSettings; @@ -30,6 +31,7 @@ import org.labkey.flow.data.FlowProtocolStep; import org.labkey.flow.data.FlowRun; import org.labkey.flow.persist.InputRole; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -46,7 +48,7 @@ public abstract class FlowExperimentJob extends FlowJob { protected static Logger _log = getJobLogger(ScriptJob.class); - protected File _containerFolder; + protected FileLike _containerFolder; FlowProtocolStep _step; String _experimentLSID; String _experimentName; @@ -68,10 +70,7 @@ public 
FlowExperimentJob(ViewBackgroundInfo info, PipeRoot root, String experime private void initStatus() throws IOException { - String guid = GUID.makeGUID(); - File logFile = FileUtil.appendName(_containerFolder, guid + ".flow.log"); - logFile.createNewFile(); - setLogFile(logFile); + setLogFile(_containerFolder.resolveChild(GUID.makeGUID() + ".flow.log")); } @Override @@ -94,7 +93,7 @@ public ActionURL urlData() return experiment.urlShow(); } - public List findRuns(File path, FlowProtocolStep step) + public List findRuns(FileLike path, FlowProtocolStep step) { FlowExperiment experiment = getExperiment(); if (experiment == null) @@ -123,7 +122,7 @@ public void error(String message, Throwable t) } } - protected boolean checkProcessPath(File path, FlowProtocolStep step) + protected boolean checkProcessPath(FileLike path, FlowProtocolStep step) { List existing = findRuns(path, step); if (!existing.isEmpty()) @@ -135,45 +134,45 @@ protected boolean checkProcessPath(File path, FlowProtocolStep step) return true; } - protected File getWorkingFolder(Container container) throws IOException + protected FileLike getWorkingFolder(Container container) throws IOException { - File dirRoot = FlowAnalyzer.getAnalysisDirectory(); - File dirFolder = FileUtil.appendName(dirRoot, "Folder" + container.getRowId()); + FileLike dirRoot = FlowAnalyzer.getAnalysisDirectory(); + FileLike dirFolder = dirRoot.resolveChild("Folder" + container.getRowId()); if (!dirFolder.exists()) { if (!FileUtil.mkdirs(dirFolder)) - throw new IOException("Failed to create flow wokring directory: " + dirFolder.getAbsolutePath()); + throw new IOException("Failed to create flow working directory: " + dirFolder); } return dirFolder; } - public File createAnalysisDirectory(File runDirectory, FlowProtocolStep step) throws Exception + public FileLike createAnalysisDirectory(FileLike runDirectory, FlowProtocolStep step) throws IOException { return createAnalysisDirectory(runDirectory.getName(), step); } - public File 
createAnalysisDirectory(String dirName, FlowProtocolStep step) throws Exception + public FileLike createAnalysisDirectory(String dirName, FlowProtocolStep step) throws IOException { - File dirFolder = getWorkingFolder(getContainer()); - File dirRun = FileUtil.appendName(dirFolder, dirName); + FileLike dirFolder = getWorkingFolder(getContainer()); + FileLike dirRun = dirFolder.resolveFile(Path.parse(dirName)); if (!dirRun.exists()) { if (!FileUtil.mkdirs(dirRun)) - throw new IOException("Could not create analysis directory: " + dirRun.getAbsolutePath()); + throw new IOException("Could not create analysis directory: " + dirRun); } for (int i = 1; ; i ++) { - File dirData = FileUtil.appendName(dirRun, step.getLabel() + i); + FileLike dirData = dirRun.resolveChild(step.getLabel() + i); if (!dirData.exists()) { if (!FileUtil.mkdirs(dirData)) - throw new IOException("Could not create analysis directory: " + dirData.getAbsolutePath()); + throw new IOException("Could not create analysis directory: " + dirData); return dirData; } } } - public void deleteAnalysisDirectory(File directory) + public void deleteAnalysisDirectory(FileLike directory) { if (!FlowSettings.isDeleteFiles()) return; @@ -181,7 +180,7 @@ public void deleteAnalysisDirectory(File directory) return; try { - File dirCompare = FlowAnalyzer.getAnalysisDirectory(); + FileLike dirCompare = FlowAnalyzer.getAnalysisDirectory(); if (!directory.toString().startsWith(dirCompare.toString())) { return; @@ -194,14 +193,14 @@ public void deleteAnalysisDirectory(File directory) } } - synchronized public File decideFileName(File directory, String name, String extension) + synchronized public FileLike decideFileName(FileLike directory, String name, String extension) { - File fileTry = FileUtil.appendName(directory, name + "." + extension); + FileLike fileTry = directory.resolveChild(name + "." 
+ extension); if (!fileTry.exists()) return fileTry; for (int i = 1; ; i++) { - fileTry = FileUtil.appendName(directory, name + i + "." + extension); + fileTry = directory.resolveChild(name + i + "." + extension); if (!fileTry.exists()) return fileTry; } diff --git a/flow/src/org/labkey/flow/script/ImportResultsJob.java b/flow/src/org/labkey/flow/script/ImportResultsJob.java index 33e18e044..d24adaf79 100644 --- a/flow/src/org/labkey/flow/script/ImportResultsJob.java +++ b/flow/src/org/labkey/flow/script/ImportResultsJob.java @@ -33,7 +33,6 @@ import org.labkey.api.exp.api.ExperimentService; import org.labkey.api.pipeline.PipeRoot; import org.labkey.api.security.User; -import org.labkey.api.util.FileUtil; import org.labkey.api.util.Tuple3; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.api.writer.FileSystemFile; @@ -48,11 +47,11 @@ import org.labkey.flow.persist.AttributeSet; import org.labkey.flow.persist.AttributeSetHelper; import org.labkey.flow.persist.InputRole; +import org.labkey.vfs.FileLike; import java.io.File; import java.net.URI; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; @@ -68,16 +67,16 @@ */ public class ImportResultsJob extends AbstractExternalAnalysisJob { - private File _analysisPathRoot = null; + private FileLike _analysisPathRoot = null; private String _analysisRunName = null; @JsonCreator protected ImportResultsJob( @JsonProperty("_analysisEngine") AnalysisEngine analysisEngine, @JsonProperty("_experiment") FlowExperiment experiment, - @JsonProperty("_originalImportedFile") File originalImportedFile, - @JsonProperty("_runFilePathRoot") File runFilePathRoot, - @JsonProperty("_keywordDirs") List keywordDirs, + @JsonProperty("_originalImportedFile") FileLike originalImportedFile, + @JsonProperty("_runFilePathRoot") FileLike runFilePathRoot, + @JsonProperty("_keywordDirs") List keywordDirs, @JsonProperty("_targetStudy") Container 
targetStudy, @JsonProperty("_failOnError") boolean failOnError) { @@ -88,10 +87,10 @@ public ImportResultsJob(ViewBackgroundInfo info, PipeRoot root, FlowExperiment experiment, AnalysisEngine analysisEngine, - File analysisPathRoot, - File originalImportedFile, - File runFilePathRoot, - List keywordDirs, + FileLike analysisPathRoot, + FileLike originalImportedFile, + FileLike runFilePathRoot, + List keywordDirs, SampleIdMap selectedFCSFiles, String analysisRunName, Container targetStudy, @@ -181,7 +180,7 @@ protected FlowRun createExperimentRun() throws Exception } else if (getRunFilePathRoot() != null) { - file = FileUtil.appendName(getRunFilePathRoot(), sampleLabel); + file = getRunFilePathRoot().resolveChild(sampleLabel).toNioPathForRead().toFile(); uri = file.toURI(); } @@ -212,7 +211,7 @@ else if (getRunFilePathRoot() != null) // UNDONE: comp matrix } - File statisticsFile = FileUtil.appendName(_analysisPathRoot, AnalysisSerializer.STATISTICS_FILENAME); + FileLike statisticsFile = _analysisPathRoot.resolveChild(AnalysisSerializer.STATISTICS_FILENAME); FlowRun run = saveAnalysis(getUser(), getContainer(), getExperiment(), _analysisRunName, statisticsFile, getOriginalImportedFile(), @@ -229,16 +228,16 @@ _analysisRunName, statisticsFile, getOriginalImportedFile(), ); // Add attachments to the run - File attachmentsDir = FileUtil.appendName(_analysisPathRoot, "attachments"); + FileLike attachmentsDir = _analysisPathRoot.resolveChild("attachments"); if (attachmentsDir.isDirectory()) { AttachmentService svc = AttachmentService.get(); - File[] files = attachmentsDir.listFiles(File::isFile); - if (files != null && files.length > 0) + List files = attachmentsDir.getChildren(FileLike::isFile); + if (!files.isEmpty()) { AttachmentParent parent = new ExpRunAttachmentParent(run.getExperimentRun()); - info("Attaching files to run: " + Arrays.stream(files).map(File::getName).collect(joining(", "))); - svc.addAttachments(parent, 
Arrays.stream(files).map(FileAttachmentFile::new).collect(toList()), getUser()); + info("Attaching files to run: " + files.stream().map(FileLike::getName).collect(joining(", "))); + svc.addAttachments(parent, files.stream().map(FileAttachmentFile::new).collect(toList()), getUser()); } } @@ -250,8 +249,8 @@ protected ExpData createExternalAnalysisData(ExperimentService svc, ExpRun externalAnalysisRun, User user, Container container, String analysisName, - File externalAnalysisFile, - File originalImportedFile) + FileLike externalAnalysisFile, + FileLike originalImportedFile) { addStatus("Saving External Analysis " + originalImportedFile.getName()); ExpData data = svc.createData(container, new DataType("Flow-ExternalAnalysis")); diff --git a/flow/src/org/labkey/flow/script/KeywordsHandler.java b/flow/src/org/labkey/flow/script/KeywordsHandler.java index 6c9341284..acbab7ad7 100644 --- a/flow/src/org/labkey/flow/script/KeywordsHandler.java +++ b/flow/src/org/labkey/flow/script/KeywordsHandler.java @@ -43,6 +43,7 @@ import org.labkey.flow.persist.AttributeSet; import org.labkey.flow.persist.FlowDataHandler; import org.labkey.flow.persist.InputRole; +import org.labkey.vfs.FileLike; import java.io.File; import java.util.ArrayList; @@ -54,28 +55,6 @@ public class KeywordsHandler extends BaseHandler { Pattern _fcsFilePattern; - protected boolean shouldUploadKeyword(String name) - { - if (true) - return true; - if (name.startsWith("$")) - { - return name.equals("$FIL") || name.equals("$DATE") || name.equals("$TOT") || name.startsWith("$P") && name.endsWith("V"); - } - if (name.endsWith("DISPLAY")) - { - return false; - } - if (name.equals("SPILL") || - name.equals("WINDOW EXTENSION") || - name.equals("APPLY COMPENSATION") || - name.equals("CREATOR") || - name.equals("FSC ASF") || - name.equals("THRESHOLD")) - return false; - return true; - } - public KeywordsHandler(ScriptJob job) { super(job, FlowProtocolStep.keywords); @@ -119,12 +98,12 @@ protected void error(String msg, 
Throwable t) _job.error(msg, t); } - protected FlowRun addRun(File directory, List data) throws Exception + protected FlowRun addRun(FileLike directory, List data) throws Exception { ExperimentArchiveDocument xarDoc = _job.createExperimentArchive(); ExperimentArchiveType xar = xarDoc.getExperimentArchive(); String runName; - File runDirectory = _job.createAnalysisDirectory(directory, FlowProtocolStep.keywords); + FileLike runDirectory = _job.createAnalysisDirectory(directory, FlowProtocolStep.keywords); runName = directory.getName(); @@ -187,20 +166,20 @@ protected FlowRun addRun(File directory, List data) throws Excep } _job.finishExperimentRun(xar, run); _job.importRuns(xarDoc, directory, runDirectory, FlowProtocolStep.keywords); - _job.deleteAnalysisDirectory(runDirectory.getParentFile()); + _job.deleteAnalysisDirectory(runDirectory.getParent()); return FlowRun.fromLSID(run.getAbout()); } - protected FlowRun importRun(File directory, Container targetStudy) throws Exception + protected FlowRun importRun(FileLike directory, Container targetStudy) throws Exception { addStatus("Reading keywords from directory " + directory); - File[] files = directory.listFiles(); - List lstFileData = new ArrayList(); + List files = directory.getChildren(); + List lstFileData = new ArrayList<>(); - for (int i = 0; i < files.length; i ++) + for (FileLike fileLike : files) { - File file = files[i]; + File file = fileLike.toNioPathForRead().toFile(); if (!isFCSFile(file)) continue; @@ -237,12 +216,12 @@ protected FCSAnalyzer getAnalyzer() } @Override - public void processRun(FlowRun srcRun, ExperimentRunType runElement, File workingDirectory) + public void processRun(FlowRun srcRun, ExperimentRunType runElement, FileLike workingDirectory) { throw new UnsupportedOperationException(); } - public FlowRun run(File directory, Container targetStudy) throws Exception + public FlowRun run(FileLike directory, Container targetStudy) throws Exception { return importRun(directory, targetStudy); } 
diff --git a/flow/src/org/labkey/flow/script/KeywordsJob.java b/flow/src/org/labkey/flow/script/KeywordsJob.java index a20fb2cd0..310c6a8f1 100644 --- a/flow/src/org/labkey/flow/script/KeywordsJob.java +++ b/flow/src/org/labkey/flow/script/KeywordsJob.java @@ -28,6 +28,7 @@ import org.labkey.flow.data.FlowProtocolStep; import org.labkey.flow.data.FlowRun; import org.labkey.flow.data.FlowScript; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -42,7 +43,7 @@ public class KeywordsJob extends ScriptJob { private static final Logger _log = LogManager.getLogger(KeywordsJob.class); - private final List _paths; + private final List _paths; private final Container _targetStudy; @JsonCreator @@ -50,7 +51,7 @@ protected KeywordsJob( @JsonProperty("_pendingRunLSIDs") List pendingRunLSIDs, @JsonProperty("_processedRunLSIDs") Map> processedRunLSIDs, @JsonProperty("_runAnalysisScript") FlowScript runAnalysisScript, - @JsonProperty("_paths") List paths, + @JsonProperty("_paths") List paths, @JsonProperty("_targetStudy") Container targetStudy ) { @@ -59,7 +60,7 @@ protected KeywordsJob( _targetStudy = targetStudy; } - public KeywordsJob(ViewBackgroundInfo info, FlowProtocol protocol, List paths, Container targetStudy, PipeRoot root) throws IOException + public KeywordsJob(ViewBackgroundInfo info, FlowProtocol protocol, List paths, Container targetStudy, PipeRoot root) throws IOException { super(info, FlowExperiment.getExperimentRunExperimentName(info.getContainer()), FlowExperiment.getExperimentRunExperimentLSID(info.getContainer()), protocol, null, FlowProtocolStep.keywords, root); @@ -77,7 +78,7 @@ public List go() { List runs = new ArrayList<>(); - for (File path : _paths) + for (FileLike path : _paths) { if (checkInterrupted()) return runs; diff --git a/flow/src/org/labkey/flow/script/KeywordsTask.java b/flow/src/org/labkey/flow/script/KeywordsTask.java index b4d4dae92..5bc7a9d96 100644 --- a/flow/src/org/labkey/flow/script/KeywordsTask.java 
+++ b/flow/src/org/labkey/flow/script/KeywordsTask.java @@ -29,6 +29,7 @@ import org.labkey.flow.data.FlowProperty; import org.labkey.flow.data.FlowProtocol; import org.labkey.flow.data.FlowRun; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -63,12 +64,12 @@ public RecordedActionSet run() return new RecordedActionSet(); } - public static List importFlowRuns(PipelineJob job, FlowProtocol protocol, List paths, Container targetStudyContainer) throws IOException, SQLException + public static List importFlowRuns(PipelineJob job, FlowProtocol protocol, List paths, Container targetStudyContainer) throws IOException, SQLException { PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); KeywordsJob keywordsJob = new KeywordsJob(job.getInfo(), protocol, paths, targetStudyContainer, pr); - keywordsJob.setLogFile(job.getLogFilePath()); + keywordsJob.setLogFile(job.getLogFileLike()); keywordsJob.setLogLevel(job.getLogLevel()); keywordsJob.setSubmitted(); diff --git a/flow/src/org/labkey/flow/script/ScriptJob.java b/flow/src/org/labkey/flow/script/ScriptJob.java index 06ca421f9..b9fa52286 100644 --- a/flow/src/org/labkey/flow/script/ScriptJob.java +++ b/flow/src/org/labkey/flow/script/ScriptJob.java @@ -48,16 +48,16 @@ import org.labkey.flow.data.FlowScript; import org.labkey.flow.persist.FlowManager; import org.labkey.flow.persist.InputRole; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.io.IOException; import java.util.ArrayList; -import java.util.Comparator; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.TreeMap; abstract public class ScriptJob extends FlowExperimentJob { @@ -200,7 +200,8 @@ public FlowRun executeHandler(FlowRun srcRun, BaseHandler handler) throws Except { ExperimentArchiveDocument doc = createExperimentArchive(); ExperimentRunType runElement = 
addExperimentRun(doc.getExperimentArchive(), handler.getRunName(srcRun)); - File workingDirectory = createAnalysisDirectory(new File(srcRun.getPath()), handler._step); + FileLike srcRunFile = FileSystemLike.wrapFile(new File(srcRun.getPath())); + FileLike workingDirectory = createAnalysisDirectory(srcRunFile, handler._step); try { handler.processRun(srcRun, runElement, workingDirectory); @@ -213,10 +214,10 @@ public FlowRun executeHandler(FlowRun srcRun, BaseHandler handler) throws Except if (!hasErrors()) { finishExperimentRun(doc.getExperimentArchive(), runElement); - importRuns(doc, new File(srcRun.getPath()), workingDirectory, handler._step); + importRuns(doc, srcRunFile, workingDirectory, handler._step); } - deleteAnalysisDirectory(workingDirectory.getParentFile()); + deleteAnalysisDirectory(workingDirectory.getParent()); return FlowRun.fromLSID(runElement.getAbout()); } @@ -239,19 +240,6 @@ public String getDescription() return "Upload"; } - public Map getProcessedRunLSIDs() - { - TreeMap ret = new TreeMap<>(Comparator.comparingInt(FlowProtocolStep::getDefaultActionSequence)); - synchronized(_processedRunLSIDs) - { - for (Map.Entry> entry : _processedRunLSIDs.entrySet()) - { - ret.put(entry.getKey(), entry.getValue().toArray(new String[0])); - } - } - return ret; - } - public ExperimentArchiveDocument createExperimentArchive() { ExperimentArchiveDocument xarDoc = ExperimentArchiveDocument.Factory.newInstance(); @@ -392,7 +380,7 @@ public void addInput(ProtocolApplicationBaseType app, FlowDataObject data, Input } - public void importRuns(ExperimentArchiveDocument xardoc, File root, File workingDirectory, FlowProtocolStep step) + public void importRuns(ExperimentArchiveDocument xardoc, FileLike root, FileLike workingDirectory, FlowProtocolStep step) { if (xardoc.getExperimentArchive().getExperimentRuns().getExperimentRunArray().length > 0) { @@ -420,12 +408,7 @@ private void addRunsLSIDs(FlowProtocolStep step, List lsids) { synchronized(_processedRunLSIDs) { - 
List list = _processedRunLSIDs.get(step); - if (list == null) - { - list = new ArrayList<>(); - _processedRunLSIDs.put(step, list); - } + List list = _processedRunLSIDs.computeIfAbsent(step, k -> new ArrayList<>()); list.addAll(lsids); } } diff --git a/flow/src/org/labkey/flow/script/ScriptXarSource.java b/flow/src/org/labkey/flow/script/ScriptXarSource.java index f5a3e2fe8..5f2c258da 100644 --- a/flow/src/org/labkey/flow/script/ScriptXarSource.java +++ b/flow/src/org/labkey/flow/script/ScriptXarSource.java @@ -22,35 +22,34 @@ import org.labkey.api.exp.XarSource; import org.labkey.api.pipeline.PipelineJob; import org.labkey.api.util.FileUtil; +import org.labkey.api.writer.PrintWriters; import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; -import java.io.File; -import java.io.FileWriter; +import java.io.Writer; import java.nio.file.Path; public class ScriptXarSource extends XarSource { private static final Logger _log = LogManager.getLogger(ScriptXarSource.class); - File _root; - File _workingDirectory; - File _logFile; + FileLike _root; + FileLike _workingDirectory; + FileLike _logFile; ExperimentArchiveDocument _doc; - public ScriptXarSource(ExperimentArchiveDocument doc, File root, File workingDirectory, PipelineJob job) + public ScriptXarSource(ExperimentArchiveDocument doc, FileLike root, FileLike workingDirectory, PipelineJob job) { super(job); _root = root; _doc = doc; _workingDirectory = workingDirectory; - _logFile = FileUtil.appendName(_workingDirectory, "flow.xar.log"); + _logFile = _workingDirectory.resolveChild("flow.xar.log"); // For informational purposes, write out the XAR file. 
try { - File xarfile = FileUtil.appendName(_workingDirectory, "flow.xar.xml"); + FileLike xarfile = _workingDirectory.resolveChild("flow.xar.xml"); - try (FileWriter writer = new FileWriter(xarfile)) + try (Writer writer = PrintWriters.getPrintWriter(xarfile.openOutputStream())) { writer.write(doc.toString()); } @@ -70,7 +69,7 @@ public String canonicalizeDataFileURL(String dataFileURL) @Override public Path getRootPath() { - return null != _root ? _root.toPath() : null; + return null != _root ? _root.toNioPathForRead() : null; } @Override @@ -89,6 +88,6 @@ public ExperimentArchiveDocument getDocument() @Override public FileLike getLogFilePath() { - return FileSystemLike.wrapFile(_logFile); + return _logFile; } } diff --git a/flow/src/org/labkey/flow/script/WorkspaceJob.java b/flow/src/org/labkey/flow/script/WorkspaceJob.java index 7965d2c02..47ee491ec 100644 --- a/flow/src/org/labkey/flow/script/WorkspaceJob.java +++ b/flow/src/org/labkey/flow/script/WorkspaceJob.java @@ -58,10 +58,9 @@ import org.labkey.flow.persist.ObjectType; import org.labkey.flow.query.FlowSchema; import org.labkey.flow.query.FlowTableType; +import org.labkey.vfs.FileLike; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; @@ -79,19 +78,19 @@ */ public class WorkspaceJob extends AbstractExternalAnalysisJob { - private final File _workspaceFile; + private final FileLike _workspaceFile; private final String _workspaceName; @JsonCreator protected WorkspaceJob( @JsonProperty("_analysisEngine") AnalysisEngine analysisEngine, @JsonProperty("_experiment") FlowExperiment experiment, - @JsonProperty("_originalImportedFile") File originalImportedFile, - @JsonProperty("_runFilePathRoot") File runFilePathRoot, - @JsonProperty("_keywordDirs") List keywordDirs, + @JsonProperty("_originalImportedFile") FileLike originalImportedFile, + @JsonProperty("_runFilePathRoot") 
FileLike runFilePathRoot, + @JsonProperty("_keywordDirs") List keywordDirs, @JsonProperty("_targetStudy") Container targetStudy, @JsonProperty("_failOnError") boolean failOnError, - @JsonProperty("_workspaceFile") File workspaceFile, + @JsonProperty("_workspaceFile") FileLike workspaceFile, @JsonProperty("_workspaceName") String workspaceName) { super(analysisEngine, experiment, originalImportedFile, runFilePathRoot, keywordDirs, targetStudy, failOnError); @@ -103,9 +102,9 @@ public WorkspaceJob(ViewBackgroundInfo info, PipeRoot root, FlowExperiment experiment, WorkspaceData workspaceData, - File originalImportedFile, - File runFilePathRoot, - List keywordDirs, + FileLike originalImportedFile, + FileLike runFilePathRoot, + List keywordDirs, SampleIdMap selectedFCSFiles, //List importGroupNames, Container targetStudy, @@ -119,17 +118,17 @@ public WorkspaceJob(ViewBackgroundInfo info, { String[] parts = workspaceData.getPath().split(File.pathSeparator); if (parts.length > 0) - name = parts[parts.length]; + name = parts[parts.length - 1]; } if (name == null) name = "workspace"; _workspaceName = name; _workspaceFile = FileUtil.createTempFile(_workspaceName, null, FlowSettings.getWorkingDirectory()); - ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(_workspaceFile)); - oos.writeObject(workspaceData.getWorkspaceObject()); - oos.flush(); - oos.close(); + try (ObjectOutputStream oos = new ObjectOutputStream(_workspaceFile.openOutputStream())) + { + oos.writeObject(workspaceData.getWorkspaceObject()); + } } @Override @@ -148,7 +147,7 @@ protected void doRun() throws Throwable @Override protected FlowRun createExperimentRun() throws Exception { - try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(_workspaceFile))) + try (ObjectInputStream ois = new ObjectInputStream(_workspaceFile.openInputStream())) { Workspace workspace = (Workspace)ois.readObject(); @@ -163,8 +162,8 @@ protected FlowRun createExperimentRun() throws Exception 
private FlowRun createExperimentRun(User user, Container container, Workspace workspace, FlowExperiment experiment, - String workspaceName, File workspaceFile, File originalImportedFile, - File runFilePathRoot, SampleIdMap resolvedFCSFiles, + String workspaceName, FileLike workspaceFile, FileLike originalImportedFile, + FileLike runFilePathRoot, SampleIdMap resolvedFCSFiles, boolean failOnError) throws Exception { SampleIdMap keywordsMap = new SampleIdMap<>(); @@ -256,7 +255,7 @@ private List getSampleIDs(Workspace workspace, SampleIdMap private boolean extractAnalysis(Container container, Workspace workspace, - File runFilePathRoot, + FileLike runFilePathRoot, SampleIdMap selectedFCSFiles, //List importGroupNames, boolean failOnError, @@ -306,7 +305,7 @@ private boolean extractAnalysis(Container container, } else if (runFilePathRoot != null) { - file = FileUtil.appendName(runFilePathRoot, sample.getLabel()); + file = runFilePathRoot.resolveChild(sample.getLabel()).toNioPathForRead().toFile(); uri = file.toURI(); } // Don't set FCSFile uri unless the file actually exists on disk. 
@@ -400,8 +399,8 @@ protected ExpData createExternalAnalysisData(ExperimentService svc, ExpRun externalAnalysisRun, User user, Container container, String analysisName, - File externalAnalysisFile, - File originalImportedFile) + FileLike externalAnalysisFile, + FileLike originalImportedFile) { addStatus("Saving Workspace Analysis " + originalImportedFile.getName()); ExpData workspaceData = svc.createData(container, FlowDataType.Workspace); diff --git a/luminex/src/org/labkey/luminex/LuminexExclusionPipelineJob.java b/luminex/src/org/labkey/luminex/LuminexExclusionPipelineJob.java index 4d1a01b06..5a57ad438 100644 --- a/luminex/src/org/labkey/luminex/LuminexExclusionPipelineJob.java +++ b/luminex/src/org/labkey/luminex/LuminexExclusionPipelineJob.java @@ -44,7 +44,7 @@ public LuminexExclusionPipelineJob(ViewBackgroundInfo info, PipeRoot root, Lumin { super(LuminexAssayProvider.NAME, info, root); - setLogFile(root.getLogDirectoryFileLike(true).resolveChild(FileUtil.makeFileNameWithTimestamp("luminex_exclusion", "log"))); + setLogFile(root.getLogDirectory(true).resolveChild(FileUtil.makeFileNameWithTimestamp("luminex_exclusion", "log"))); _form = form; _exclusionType = LuminexManager.ExclusionType.valueOf(form.getTableName()); diff --git a/ms2/src/org/labkey/ms2/MS2Controller.java b/ms2/src/org/labkey/ms2/MS2Controller.java index 512455407..212ea0124 100644 --- a/ms2/src/org/labkey/ms2/MS2Controller.java +++ b/ms2/src/org/labkey/ms2/MS2Controller.java @@ -1454,7 +1454,7 @@ public ModelAndView getView(ProteinDisambiguationForm form, BindException errors targetURL.addParameters(params); - // Track all of the unique sequences + // Track all the unique sequences Set sequences = new HashSet<>(); List proteins = new TableSelector(tableInfo, null, new Sort("BestName")).getArrayList(Protein.class); Pair> actionWithProteins = new Pair<>(targetURL, proteins); @@ -4826,7 +4826,7 @@ public void export(DetailsForm form, HttpServletResponse response, BindException { throw new 
NotFoundException("Could not find parameters file for run '" + run.getFileName() + "'."); } - PageFlowUtil.streamFile(response, paramsFile, false); + PageFlowUtil.streamFile(response, paramsFile.toPath(), false); } } diff --git a/ms2/src/org/labkey/ms2/PepXmlImporter.java b/ms2/src/org/labkey/ms2/PepXmlImporter.java index 59524e516..c1d6cc039 100644 --- a/ms2/src/org/labkey/ms2/PepXmlImporter.java +++ b/ms2/src/org/labkey/ms2/PepXmlImporter.java @@ -26,6 +26,7 @@ import org.labkey.api.security.User; import org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.Path; import org.labkey.api.util.PepXMLFileType; import org.labkey.api.util.massSpecDataFileType; import org.labkey.ms2.reader.MS2Loader; @@ -181,19 +182,19 @@ private void writeFractionInfo(MS2Loader.PeptideFraction fraction) throws IOExce // First, check two directories up from the MS2 results. This is where searches done through the CPAS // pipeline will be File pepXmlDir = new File(_path); - File mzXMLFile = null; + FileLike mzXMLFile = null; massSpecDataFileType FT_MZXML = new massSpecDataFileType(); if (pepXmlDir.getParentFile() != null && pepXmlDir.getParentFile().getParentFile() != null) { - mzXMLFile = FT_MZXML.getFile(pepXmlDir.getParentFile().getParentFile(), newFilename); + mzXMLFile = FT_MZXML.getFile(FileSystemLike.wrapFile(pepXmlDir.getParentFile().getParentFile()), newFilename); } if (!NetworkDrive.exists(mzXMLFile)) { // If not there, look in the same directory as the MS2 results - mzXMLFile = FT_MZXML.getFile(pepXmlDir, newFilename); + mzXMLFile = FT_MZXML.getFile(FileSystemLike.wrapFile(pepXmlDir), newFilename); } - fraction.setSpectrumPath(mzXMLFile.getAbsolutePath()); + fraction.setSpectrumPath(mzXMLFile.toNioPathForRead().toFile().getAbsolutePath()); } if (! 
NetworkDrive.exists(new File(_path + "/" + _gzFileName)) && baseName != null) { @@ -226,6 +227,7 @@ protected static String switchSuffix(String filename, String suffix) protected void processSpectrumFile(PepXmlFraction fraction, Set scans, MS2Progress progress, boolean shouldLoadSpectra, boolean shouldLoadRetentionTimes) { File mzXmlFile = getMzXMLFile(fraction); + FileLike mzXmlFileLike = mzXmlFile == null ? null : FileSystemLike.wrapFile(mzXmlFile); if ((_run.getType().equalsIgnoreCase(MS2RunType.Mascot.name())||_run.getType().equalsIgnoreCase(MS2RunType.Sequest.name())) // TODO: Move this check (perhaps all the code) into the appropriate run classes && null == mzXmlFile) { @@ -235,13 +237,13 @@ protected void processSpectrumFile(PepXmlFraction fraction, Set scans, String baseName = _gzFileName; baseName = baseName.replaceAll("\\.pep\\.tgz$", ""); massSpecDataFileType FT_MZXML = new massSpecDataFileType(); - File path = new File(_path,""); - File engineProtocolMzXMLFile = FT_MZXML.getFile(path, baseName); + File path = new File(_path); + FileLike engineProtocolMzXMLFile = FT_MZXML.getFile(FileSystemLike.wrapFile(path), baseName); String mzXmlFileName = engineProtocolMzXMLFile.getName(); - File engineProtocolDir = engineProtocolMzXMLFile.getParentFile(); - File engineDir = engineProtocolDir.getParentFile(); - File mzXMLFile = new File(engineDir.getParent(), mzXmlFileName); - mzXmlFile = mzXMLFile.getAbsoluteFile(); + FileLike engineProtocolDir = engineProtocolMzXMLFile.getParent(); + FileLike engineDir = engineProtocolDir.getParent(); + FileLike mzXMLFile = engineDir.getParent().resolveFile(Path.parse(mzXmlFileName)); + mzXmlFileLike = mzXMLFile; } String gzFileName = _path + "/" + _gzFileName; File gzFile = _context.findFile(gzFileName); @@ -258,7 +260,7 @@ protected void processSpectrumFile(PepXmlFraction fraction, Set scans, } } - SpectrumImporter sl = new SpectrumImporter(gzFileName, "", FileSystemLike.wrapFile(mzXmlFile), scans, progress, _fractionId, _log, shouldLoadSpectra, shouldLoadRetentionTimes); +
SpectrumImporter sl = new SpectrumImporter(gzFileName, "", mzXmlFileLike, scans, progress, _fractionId, _log, shouldLoadSpectra, shouldLoadRetentionTimes); sl.upload(); updateFractionSpectrumFileName(sl.getFile() == null ? null : sl.getFile().toNioPathForRead().toFile()); } diff --git a/ms2/src/org/labkey/ms2/PeptideImporter.java b/ms2/src/org/labkey/ms2/PeptideImporter.java index 6f551b057..4119af4e9 100644 --- a/ms2/src/org/labkey/ms2/PeptideImporter.java +++ b/ms2/src/org/labkey/ms2/PeptideImporter.java @@ -26,15 +26,17 @@ import org.labkey.api.protein.fasta.FastaDbLoader; import org.labkey.api.query.FieldKey; import org.labkey.api.security.User; -import org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.Path; import org.labkey.ms2.pipeline.MS2PipelineManager; import org.labkey.ms2.reader.AbstractQuantAnalysisResult; import org.labkey.ms2.reader.MS2Loader; import org.labkey.ms2.reader.PeptideProphetHandler; import org.labkey.ms2.reader.PeptideProphetSummary; import org.labkey.ms2.reader.RelativeQuantAnalysisSummary; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.io.IOException; @@ -133,32 +135,34 @@ public void writeRunInfo(MS2Loader.PeptideFraction fraction, MS2Progress progres // Handle PEAKS FASTA references for (String dbName : fraction.getDatabaseParameterValues()) { - File database = null; + FileLike database = null; // First look in the FASTA directory, with and without a .fasta extension - File databaseRoot = MS2PipelineManager.getSequenceDatabaseRoot(_container, true); + FileLike databaseRoot = MS2PipelineManager.getSequenceDatabaseRoot(_container, true); if (NetworkDrive.exists(databaseRoot)) { - database = FileUtil.appendName(databaseRoot, dbName); + database = databaseRoot.resolveFile(Path.parse(dbName)); if (!NetworkDrive.exists(database)) { - database = FileUtil.appendName(databaseRoot, dbName + 
".fasta"); + database = databaseRoot.resolveFile(Path.parse(dbName + ".fasta")); } } + File dbPath = new File(_path); + // Also try relative to the file being imported, with and without a .fasta extension - if (!NetworkDrive.exists(database)) + if (!NetworkDrive.exists(database) && dbPath.exists()) { - database = FileUtil.appendName(new File(_path), dbName); + database = FileSystemLike.wrapFile(dbPath).resolveFile(Path.parse(dbName)); } - if (!NetworkDrive.exists(database)) + if (!NetworkDrive.exists(database) && dbPath.exists()) { - database = FileUtil.appendName(new File(_path), dbName + ".fasta"); + database = FileSystemLike.wrapFile(dbPath).resolveFile(Path.parse(dbName + ".fasta")); } if (NetworkDrive.exists(database)) { - dbPaths.add(database.getAbsolutePath()); + dbPaths.add(database.toNioPathForRead().toFile().getAbsolutePath()); } else { diff --git a/ms2/src/org/labkey/ms2/ProteinProphetImporter.java b/ms2/src/org/labkey/ms2/ProteinProphetImporter.java index 00915161f..9048ed78c 100644 --- a/ms2/src/org/labkey/ms2/ProteinProphetImporter.java +++ b/ms2/src/org/labkey/ms2/ProteinProphetImporter.java @@ -370,7 +370,7 @@ private MS2Run importRun(ViewBackgroundInfo info, Logger log) throws IOException if (TPPTask.isProtXMLFile(_file.getParent().toNioPathForRead().toFile())) { String baseName = TPPTask.FT_PROT_XML.getBaseName(_file); - pepXMLFile = TPPTask.getPepXMLFile(_file.getParent().toNioPathForRead().toFile(), baseName); + pepXMLFile = TPPTask.getPepXMLFile(_file.getParent(), baseName).toNioPathForRead().toFile(); attemptedFiles.add(pepXMLFile.getAbsolutePath()); if (!NetworkDrive.exists(pepXMLFile)) { diff --git a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineJob.java b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineJob.java index 5f5a970ff..0dbcb5a6a 100644 --- a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineJob.java +++ b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineJob.java @@ -31,7 +31,6 @@ import 
org.labkey.api.view.ViewBackgroundInfo; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.*; @@ -53,25 +52,25 @@ public static String getRawPepXMLSuffix() } // useful for constructing a filename for creation, will append .gz if indicated - public static File getPepXMLConvertFile(File dirAnalysis, String baseName, FileType.gzSupportLevel gzSupport) + public static FileLike getPepXMLConvertFile(FileLike dirAnalysis, String baseName, FileType.gzSupportLevel gzSupport) { FileType ft = new FileType(getRawPepXMLSuffix(), gzSupport); String name = ft.getName(dirAnalysis,baseName); - return new File(dirAnalysis, name); + return dirAnalysis.resolveChild(name); } // useful for locating an existing file that may or may not be .gz - public static File getPepXMLConvertFile(File dirAnalysis, String baseName) + public static FileLike getPepXMLConvertFile(FileLike dirAnalysis, String baseName) { - return getPepXMLConvertFile(dirAnalysis,baseName, FileType.gzSupportLevel.SUPPORT_GZ); + return getPepXMLConvertFile(dirAnalysis, baseName, FileType.gzSupportLevel.SUPPORT_GZ); } - protected final File _dirSequenceRoot; + protected final FileLike _dirSequenceRoot; protected boolean _fractions; @JsonCreator - protected AbstractMS2SearchPipelineJob(@JsonProperty("_dirSequenceRoot") File dirSequenceRoot) + protected AbstractMS2SearchPipelineJob(@JsonProperty("_dirSequenceRoot") FileLike dirSequenceRoot) { super(); _dirSequenceRoot = dirSequenceRoot; @@ -117,29 +116,29 @@ public AbstractMS2SearchPipelineJob(AbstractMS2SearchPipelineJob job, FileLike f protected void writeInputFilesToLog() { - for (File file : getInputFiles()) + for (FileLike file : getInputFiles()) { info(file.getName()); } } @Override - public File findInputFile(String name) + public FileLike findInputFile(String name) { - for (File fileInput : getInputFiles()) + for (FileLike fileInput : getInputFiles()) { if (name.equals(fileInput.getName())) return fileInput; } // Check if 
there's an analysis-specific copy of the file - File analysisFile = FileUtil.appendName(getAnalysisDirectory(), name); + FileLike analysisFile = getAnalysisDirectory().resolveChild(name); if (NetworkDrive.exists(analysisFile)) { return analysisFile; } // If not, check if there's a shared copy of the file in the data directory - File dataFile = FileUtil.appendName(getDataDirectory(), name); + FileLike dataFile = getDataDirectory().resolveChild(name); if (NetworkDrive.exists(dataFile)) { return dataFile; @@ -149,7 +148,7 @@ public File findInputFile(String name) } @Override - public File findOutputFile(String name) + public FileLike findOutputFile(String name) { // Look through all the tasks in this pipeline for (TaskId taskId : getTaskPipeline().getTaskProgression()) @@ -165,13 +164,13 @@ public File findOutputFile(String name) // mzXML should be in the same directory as the mzXML and RAW files. if (fileType.isType(name)) { - return FileUtil.appendName(getDataDirectory(), name); + return getDataDirectory().resolveChild(name); } } } } - return FileUtil.appendName(getAnalysisDirectory(), name); + return getAnalysisDirectory().resolveChild(name); } /** @@ -195,10 +194,10 @@ public boolean isRefreshRequired() } @Override - public List getInteractInputFiles() + public List getInteractInputFiles() { - List files = new ArrayList<>(); - for (File fileSpectra : getInputFiles()) + List files = new ArrayList<>(); + for (FileLike fileSpectra : getInputFiles()) { files.add(getPepXMLConvertFile(getAnalysisDirectory(), FileUtil.getBaseName(fileSpectra), @@ -208,7 +207,7 @@ public List getInteractInputFiles() } @Override - public List getInteractSpectraFiles() + public List getInteractSpectraFiles() { // Default to looking for just mzXML files List types = Collections.singletonList(AbstractMS2SearchProtocol.FT_MZXML); @@ -225,13 +224,13 @@ public List getInteractSpectraFiles() } } - ArrayList files = new ArrayList<>(); - for (File fileSpectra : getInputFiles()) + List files = new 
ArrayList<>(); + for (FileLike fileSpectra : getInputFiles()) { // Look at the different types in priority order for (FileType type : types) { - File f = type.newFile(getDataDirectory(), FileUtil.getBaseName(fileSpectra)); + FileLike f = type.newFile(getDataDirectory(), FileUtil.getBaseName(fileSpectra)); if (NetworkDrive.exists(f)) { files.add(f); @@ -244,21 +243,21 @@ public List getInteractSpectraFiles() } @Override - public File getSearchNativeSpectraFile() + public FileLike getSearchNativeSpectraFile() { return null; // No spectra conversion by default. } @Override - public File getSequenceRootDirectory() + public FileLike getSequenceRootDirectory() { return _dirSequenceRoot; } @Override - public File[] getSequenceFiles() + public List getSequenceFiles() { - ArrayList arrFiles = new ArrayList<>(); + List arrFiles = new ArrayList<>(); String paramDatabase = getParameters().get("pipeline, database"); if (paramDatabase != null) @@ -268,7 +267,7 @@ public File[] getSequenceFiles() arrFiles.add(MS2PipelineManager.getSequenceDBFile(_dirSequenceRoot, path)); } - return arrFiles.toArray(new File[0]); + return arrFiles; } @Override diff --git a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineProvider.java b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineProvider.java index 2cc7d2edc..4c036244f 100644 --- a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineProvider.java +++ b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchPipelineProvider.java @@ -23,12 +23,10 @@ import org.labkey.api.pipeline.PipelineDirectory; import org.labkey.api.pipeline.TaskPipeline; import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.util.FileUtil; import org.labkey.api.view.HttpView; import org.labkey.api.view.ViewContext; import java.io.File; -import java.nio.file.Path; /** * Common base class for pipeline providers that map to MS2 searches (XTandem, Mascot, etc) @@ -87,14 +85,6 @@ public boolean isSearch() return true; } - @Override - 
public void initSystemDirectory(Path rootDir, Path systemDir) - { - AbstractMS2SearchProtocolFactory factory = getProtocolFactory(); - if (factory != null && !FileUtil.hasCloudScheme(rootDir) && !FileUtil.hasCloudScheme(systemDir)) - factory.initSystemDirectory(rootDir.toFile(), systemDir.toFile()); - } - @Override public AbstractMS2SearchProtocolFactory getProtocolFactory(TaskPipeline pipeline) { diff --git a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchProtocol.java b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchProtocol.java index a7f069d98..ed2937187 100644 --- a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchProtocol.java +++ b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchProtocol.java @@ -30,9 +30,7 @@ import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -63,7 +61,7 @@ public String getJoinedBaseName() return LEGACY_JOINED_BASENAME; } - public File getDirSeqRoot() + public FileLike getDirSeqRoot() { return MS2PipelineManager.getSequenceDatabaseRoot(_container, true); } diff --git a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchTaskFactory.java b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchTaskFactory.java index e620b2a8f..b705d9d28 100644 --- a/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchTaskFactory.java +++ b/ms2/src/org/labkey/ms2/pipeline/AbstractMS2SearchTaskFactory.java @@ -22,8 +22,8 @@ import org.labkey.api.pipeline.TaskFactory; import org.labkey.api.util.FileType; import org.labkey.api.util.NetworkDrive; +import org.labkey.vfs.FileLike; -import java.io.File; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -93,16 +93,16 @@ public String getStatusName() return "SEARCH"; } - public File findInputFile(MS2SearchJobSupport support) throws PipelineJobException + public FileLike findInputFile(MS2SearchJobSupport 
support) throws PipelineJobException { - File analysisDirectory = support.getAnalysisDirectory(); - File dataDirectory = support.getDataDirectory(); + FileLike analysisDirectory = support.getAnalysisDirectory(); + FileLike dataDirectory = support.getDataDirectory(); String baseName = support.getBaseName(); for (FileType fileType : getInputTypes()) { // Check if there's a version of the file in the analysis directory first. This ensures we grab the // analysis-specific version of the spectra file, if it exists - File f = fileType.newFile(analysisDirectory, baseName); + FileLike f = fileType.newFile(analysisDirectory, baseName); if (NetworkDrive.exists(f)) { return f; diff --git a/ms2/src/org/labkey/ms2/pipeline/FastaCheckTask.java b/ms2/src/org/labkey/ms2/pipeline/FastaCheckTask.java index 9818394ad..6cc3d0e98 100644 --- a/ms2/src/org/labkey/ms2/pipeline/FastaCheckTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/FastaCheckTask.java @@ -25,8 +25,8 @@ import org.labkey.api.util.FileType; import org.labkey.api.util.FileUtil; import org.labkey.api.protein.fasta.FastaValidator; +import org.labkey.vfs.FileLike; -import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -132,7 +132,7 @@ public RecordedActionSet run() throws PipelineJobException boolean success = true; FastaValidator validator = new FastaValidator(); - for (File sequenceFile : getJobSupport().getSequenceFiles()) + for (FileLike sequenceFile : getJobSupport().getSequenceFiles()) { action.addInput(sequenceFile, "FASTA"); @@ -147,7 +147,7 @@ public RecordedActionSet run() throws PipelineJobException if (_factory._requireDecoyDatabase) { - for (File decoyFile : getDecoySequenceFiles(getJobSupport())) + for (FileLike decoyFile : getDecoySequenceFiles(getJobSupport())) { getJob().info("Checking decoy file: " + decoyFile); success &= validateSequenceFile(validator, decoyFile); @@ -170,9 +170,9 @@ public RecordedActionSet run() throws PipelineJobException } - public 
static List getDecoySequenceFiles(MS2SearchJobSupport job) + public static List getDecoySequenceFiles(MS2SearchJobSupport job) { - List result = new ArrayList<>(); + List result = new ArrayList<>(); if (job.getParameters().get(DECOY_DATABASE_PARAM_NAME) != null) { String decoyPath = job.getParameters().get(DECOY_DATABASE_PARAM_NAME); @@ -180,7 +180,7 @@ public static List getDecoySequenceFiles(MS2SearchJobSupport job) } else { - for (File sequenceFile : job.getSequenceFiles()) + for (FileLike sequenceFile : job.getSequenceFiles()) { String basename = FileUtil.getBaseName(sequenceFile); String extension = FileUtil.getExtension(sequenceFile); @@ -189,10 +189,10 @@ public static List getDecoySequenceFiles(MS2SearchJobSupport job) throw new IllegalStateException("No decoy file suffixes configured!"); } int i = 0; - File decoyFile = new File(sequenceFile.getParentFile(), basename + DECOY_FILE_SUFFIXES.get(i++) + (extension == null ? "" : "." + extension)); + FileLike decoyFile = sequenceFile.getParent().resolveChild(basename + DECOY_FILE_SUFFIXES.get(i++) + (extension == null ? "" : "." + extension)); while (!decoyFile.exists() && i < DECOY_FILE_SUFFIXES.size()) { - decoyFile = new File(sequenceFile.getParentFile(), basename + DECOY_FILE_SUFFIXES.get(i++) + (extension == null ? "" : "." + extension)); + decoyFile = sequenceFile.getParent().resolveChild(basename + DECOY_FILE_SUFFIXES.get(i++) + (extension == null ? "" : "." 
+ extension)); } result.add(decoyFile); } @@ -200,7 +200,7 @@ public static List getDecoySequenceFiles(MS2SearchJobSupport job) return result; } - private boolean validateSequenceFile(FastaValidator validator, File fastaFile) + private boolean validateSequenceFile(FastaValidator validator, FileLike fastaFile) { if (!fastaFile.exists()) { diff --git a/ms2/src/org/labkey/ms2/pipeline/MS2PipelineJobSupport.java b/ms2/src/org/labkey/ms2/pipeline/MS2PipelineJobSupport.java index d38f31dcc..b3400d9d2 100644 --- a/ms2/src/org/labkey/ms2/pipeline/MS2PipelineJobSupport.java +++ b/ms2/src/org/labkey/ms2/pipeline/MS2PipelineJobSupport.java @@ -16,8 +16,9 @@ package org.labkey.ms2.pipeline; import org.labkey.api.pipeline.file.FileAnalysisJobSupport; +import org.labkey.vfs.FileLike; -import java.io.File; +import java.util.List; /** * MS2PipelineJobSupport Interface for providing MS2 search support to @@ -28,12 +29,12 @@ public interface MS2PipelineJobSupport extends FileAnalysisJobSupport /** * Returns the root sequence file directory. */ - File getSequenceRootDirectory(); + FileLike getSequenceRootDirectory(); /** * Returns a list of FASTA files to be searched. 
*/ - File[] getSequenceFiles(); + List getSequenceFiles(); /** * Returns true if the job should perform a combined analysis of diff --git a/ms2/src/org/labkey/ms2/pipeline/MS2PipelineManager.java b/ms2/src/org/labkey/ms2/pipeline/MS2PipelineManager.java index 1794d8443..4ba2a6a2f 100644 --- a/ms2/src/org/labkey/ms2/pipeline/MS2PipelineManager.java +++ b/ms2/src/org/labkey/ms2/pipeline/MS2PipelineManager.java @@ -15,8 +15,6 @@ */ package org.labkey.ms2.pipeline; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; import org.labkey.api.data.Container; import org.labkey.api.pipeline.*; import org.labkey.api.pipeline.cmd.ConvertTaskId; @@ -26,6 +24,7 @@ import org.labkey.api.util.Path; import org.labkey.api.view.NotFoundException; import org.labkey.ms2.pipeline.mascot.MascotSearchTask; +import org.labkey.vfs.FileLike; import org.labkey.vfs.FileSystemLike; import java.io.*; @@ -45,7 +44,6 @@ */ public class MS2PipelineManager { - private static final Logger _log = LogManager.getLogger(MS2PipelineProvider.class); private static final String DEFAULT_FASTA_DIR = "databases"; public static final String SEQUENCE_DB_ROOT_TYPE = "SEQUENCE_DATABASE"; @@ -67,7 +65,7 @@ public boolean accept(File file) if (TPPTask.isPepXMLFile(file)) { - File parent = file.getParentFile(); + FileLike parent = FileSystemLike.wrapFile(file.getParentFile()); String basename = TPPTask.FT_PEP_XML.getBaseName(file); return !fileExists(TPPTask.getProtXMLFile(parent, basename)) && !fileExists(AbstractMS2SearchProtocol.FT_SEARCH_XAR.newFile(parent, basename)); @@ -99,19 +97,18 @@ public boolean accept(File f) } - public static File getSequenceDBFile(File fileRoot, String name) + public static FileLike getSequenceDBFile(FileLike fileRoot, String name) { if (fileRoot == null) throw new IllegalArgumentException("Invalid sequence root directory."); - File file = new File(fileRoot, name); - if (!file.getAbsolutePath().startsWith(fileRoot.getAbsolutePath())) - throw new
IllegalArgumentException("Invalid sequence database '" + name + "'."); - - return file; + FileLike file = fileRoot.resolveFile(Path.parse(name)); + if (!file.toNioPathForRead().normalize().startsWith(fileRoot.toNioPathForRead().normalize())) + throw new IllegalArgumentException("Invalid sequence database '" + name + "'."); + return file; } - public static File getSequenceDatabaseRoot(Container container, boolean includeParentContainers) + public static FileLike getSequenceDatabaseRoot(Container container, boolean includeParentContainers) { PipeRoot dbRoot = includeParentContainers ? PipelineService.get().findPipelineRoot(container, SEQUENCE_DB_ROOT_TYPE) : PipelineService.get().getPipelineRootSetting(container, SEQUENCE_DB_ROOT_TYPE); if (dbRoot == null) @@ -120,8 +114,8 @@ public static File getSequenceDatabaseRoot(Container container, boolean includeP PipeRoot root = PipelineService.get().getPipelineRootSetting(container); if (root != null) { - File file = getSequenceDatabaseRoot(root); - if (!NetworkDrive.exists(file) && NetworkDrive.exists(file.getParentFile())) + FileLike file = getSequenceDatabaseRoot(root); + if (!NetworkDrive.exists(file) && NetworkDrive.exists(file.getParent())) { // Try to create it if it doesn't exist try @@ -140,7 +134,7 @@ public static File getSequenceDatabaseRoot(Container container, boolean includeP } throw new NotFoundException("Could not find database sequence root for " + container.getPath()); } - return dbRoot.getRootPath(); + return dbRoot.getRootFileLike(); } public static void setSequenceDatabaseRoot(User user, Container container, URI rootSeq) throws SQLException @@ -155,19 +149,19 @@ public static void setSequenceDatabaseRoot(User user, Container container, URI r service.setPipelineRoot(user, container, SEQUENCE_DB_ROOT_TYPE, false, rootSeq); } - private static File getSequenceDatabaseRoot(PipeRoot root) + private static FileLike getSequenceDatabaseRoot(PipeRoot root) { - return root.resolvePath(DEFAULT_FASTA_DIR); + return root.resolvePathToFileLike(DEFAULT_FASTA_DIR); } - public static File getLocalMascotFile(File sequenceRoot,
String db, String release) { - return FileUtil.appendPath(sequenceRoot, Path.parse("mascot/" + db + "/" + release)); + return sequenceRoot.resolveFile(Path.parse("mascot/" + db + "/" + release)); } - public static File getLocalMascotFileHash(File sequenceRoot, String db, String release) + public static FileLike getLocalMascotFileHash(FileLike sequenceRoot, String db, String release) { - return FileUtil.appendPath(sequenceRoot, Path.parse("mascot/" + db + "/" + release+".hash")); + return sequenceRoot.resolveFile(Path.parse("mascot/" + db + "/" + release+".hash")); } public static boolean exists(File file, Set knownFiles, Set checkedDirectories) @@ -187,21 +181,4 @@ public static boolean exists(File file, Set knownFiles, Set checkedD return file.exists(); } - private static class SequenceDbFileFilter implements FileFilter - { - @Override - public boolean accept(File f) - { - final String name = f.getName(); - //added filters for Sequest indexed databases - return !(name.startsWith(".") || - name.endsWith(".check") || - name.endsWith(".out") || - name.endsWith(".idx") || - name.endsWith(".dgt") || - name.endsWith(".log") || - name.endsWith(".hdr") || - name.endsWith(".hash")); - } - } } diff --git a/ms2/src/org/labkey/ms2/pipeline/MS2SearchJobSupport.java b/ms2/src/org/labkey/ms2/pipeline/MS2SearchJobSupport.java index 9b296243c..74fc89449 100644 --- a/ms2/src/org/labkey/ms2/pipeline/MS2SearchJobSupport.java +++ b/ms2/src/org/labkey/ms2/pipeline/MS2SearchJobSupport.java @@ -15,6 +15,8 @@ */ package org.labkey.ms2.pipeline; +import org.labkey.vfs.FileLike; + import java.io.File; /** @@ -26,12 +28,12 @@ public interface MS2SearchJobSupport extends MS2PipelineJobSupport /** * Returns the native output file for the search. */ - File getSearchNativeOutputFile(); + FileLike getSearchNativeOutputFile(); /** * Returns native spectra file converted from the standard format, * or null if the standard format was used. 
*/ - File getSearchNativeSpectraFile(); + FileLike getSearchNativeSpectraFile(); } diff --git a/ms2/src/org/labkey/ms2/pipeline/PipelineController.java b/ms2/src/org/labkey/ms2/pipeline/PipelineController.java index 116fb2974..c5c41959d 100644 --- a/ms2/src/org/labkey/ms2/pipeline/PipelineController.java +++ b/ms2/src/org/labkey/ms2/pipeline/PipelineController.java @@ -230,7 +230,7 @@ public ActionURL getSuccessURL(SequenceDBRootForm form) @Override public ModelAndView getView(SequenceDBRootForm form, boolean reshow, BindException errors) { - File fileRoot = MS2PipelineManager.getSequenceDatabaseRoot(getContainer(), false); + FileLike fileRoot = MS2PipelineManager.getSequenceDatabaseRoot(getContainer(), false); final String localPathRoot; if (fileRoot == null) diff --git a/ms2/src/org/labkey/ms2/pipeline/ProteinProphetPipelineProvider.java b/ms2/src/org/labkey/ms2/pipeline/ProteinProphetPipelineProvider.java index 40b58b6a3..47b4a94fd 100644 --- a/ms2/src/org/labkey/ms2/pipeline/ProteinProphetPipelineProvider.java +++ b/ms2/src/org/labkey/ms2/pipeline/ProteinProphetPipelineProvider.java @@ -24,6 +24,7 @@ import org.labkey.api.module.Module; import org.labkey.api.security.permissions.InsertPermission; import org.labkey.ms2.MS2Controller; +import org.labkey.vfs.FileSystemLike; import java.io.File; @@ -65,7 +66,7 @@ public boolean accept(File f) File parent = f.getParentFile(); String basename = fileType.getBaseName(f); - return !fileExists(AbstractMS2SearchProtocol.FT_SEARCH_XAR.newFile(parent, basename)); + return !fileExists(AbstractMS2SearchProtocol.FT_SEARCH_XAR.newFile(FileSystemLike.wrapFile(parent), basename)); } return false; diff --git a/ms2/src/org/labkey/ms2/pipeline/Sqt2PinTask.java b/ms2/src/org/labkey/ms2/pipeline/Sqt2PinTask.java index 42b808f43..7ad059f33 100644 --- a/ms2/src/org/labkey/ms2/pipeline/Sqt2PinTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/Sqt2PinTask.java @@ -31,6 +31,7 @@ import org.labkey.api.pipeline.file.AbstractFileAnalysisJob; 
import org.labkey.api.util.FileType; import org.labkey.api.writer.PrintWriters; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -66,10 +67,11 @@ public RecordedActionSet run() throws PipelineJobException { TaskPath targetListTP = new TaskPath(".target.list"); TaskPath decoyListTP = new TaskPath(".decoy.list"); - File targetListFile = _wd.newWorkFile(WorkDirectory.Function.output, targetListTP, job.getBaseName()); - File decoyListFile = _wd.newWorkFile(WorkDirectory.Function.output, decoyListTP, job.getBaseName()); + FileLike targetListFile = _wd.newWorkFile(WorkDirectory.Function.output, targetListTP, job.getBaseName()); + FileLike decoyListFile = _wd.newWorkFile(WorkDirectory.Function.output, decoyListTP, job.getBaseName()); - try (PrintWriter targetWriter = PrintWriters.getPrintWriter(targetListFile); PrintWriter decoyWriter = PrintWriters.getPrintWriter(decoyListFile)) + try (PrintWriter targetWriter = PrintWriters.getPrintWriter(targetListFile.openOutputStream()); + PrintWriter decoyWriter = PrintWriters.getPrintWriter(decoyListFile.openOutputStream())) { FileType targetSQTFileType = new FileType(".sqt"); FileType decoySQTFileType = new FileType(".decoy.sqt"); @@ -84,9 +86,9 @@ public RecordedActionSet run() throws PipelineJobException decoyWriter.write(decoyFileName); decoyWriter.write("\n"); - File inputTargetFile = new File(job.getAnalysisDirectory(), targetFileName); + FileLike inputTargetFile = job.getAnalysisDirectory().resolveChild(targetFileName); _wd.inputFile(inputTargetFile, false); - File inputDecoyFile = new File(job.getAnalysisDirectory(), decoyFileName); + FileLike inputDecoyFile = job.getAnalysisDirectory().resolveChild(decoyFileName); _wd.inputFile(inputDecoyFile, false); action.addInput(inputTargetFile, "SQT" + (index == 1 ? "" : Integer.toString(index))); action.addInput(inputDecoyFile, "DecoySQT" + (index == 1 ? 
"" : Integer.toString(index))); @@ -94,7 +96,7 @@ public RecordedActionSet run() throws PipelineJobException } } - File output = new File(_wd.getDir(), job.getBaseName() + ".pin.xml"); + FileLike output = _wd.getDir().resolveChild(job.getBaseName() + ".pin.xml"); List args = new ArrayList<>(); String version = getJob().getParameters().get("sqt2pin, version"); @@ -108,7 +110,7 @@ public RecordedActionSet run() throws PipelineJobException args.add(decoyListFile.getName()); ProcessBuilder pb = new ProcessBuilder(args); - pb.directory(_wd.getDir()); + pb.directory(_wd.getDir().toNioPathForRead().toFile()); job.runSubProcess(pb, _wd.getDir()); diff --git a/ms2/src/org/labkey/ms2/pipeline/TPPTask.java b/ms2/src/org/labkey/ms2/pipeline/TPPTask.java index 9ae3d9231..3923ed902 100644 --- a/ms2/src/org/labkey/ms2/pipeline/TPPTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/TPPTask.java @@ -39,6 +39,7 @@ import org.labkey.api.util.PepXMLFileType; import org.labkey.api.util.ProtXMLFileType; import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.io.FileNotFoundException; @@ -115,7 +116,7 @@ public static String getTPPVersion(PipelineJob job) return job.getParameters().get("pipeline tpp, version"); } - public static File getPepXMLFile(File dirAnalysis, String baseName) + public static FileLike getPepXMLFile(FileLike dirAnalysis, String baseName) { return FT_PEP_XML.newFile(dirAnalysis, baseName); } @@ -130,7 +131,7 @@ public static boolean isPepXMLFile(FileLike file) return FT_PEP_XML.isType(file); } - public static File getProtXMLFile(File dirAnalysis, String baseName) + public static FileLike getProtXMLFile(FileLike dirAnalysis, String baseName) { return FT_PROT_XML.newFile(dirAnalysis, baseName); } @@ -158,12 +159,12 @@ public interface JobSupport extends MS2PipelineJobSupport /** * List of pepXML files to use as inputs to "xinteract". 
*/ - List getInteractInputFiles(); + List getInteractInputFiles(); /** * List of mzXML files to use as inputs to "xinteract" quantitation. */ - List getInteractSpectraFiles(); + List getInteractSpectraFiles(); /** * True if PeptideProphet and ProteinProphet can be run on the input files. @@ -191,7 +192,7 @@ public Factory(String name) } @Override - public PipelineJob.Task createTask(PipelineJob job) + public TPPTask createTask(PipelineJob job) { return new TPPTask(this, job); } @@ -219,7 +220,7 @@ public boolean isJobComplete(PipelineJob job) { JobSupport support = job.getJobSupport(JobSupport.class); String baseName = support.getBaseName(); - File dirAnalysis = support.getAnalysisDirectory(); + FileLike dirAnalysis = support.getAnalysisDirectory(); if (!NetworkDrive.exists(getPepXMLFile(dirAnalysis, baseName))) return false; @@ -301,33 +302,33 @@ public RecordedActionSet run() throws PipelineJobException actions.add(pepXMLAction); // Set mzXML directory only if needed. - File dirMzXml = null; + FileLike dirMzXml = null; // TODO: mzXML files may be required, and input disk space requirements // may be too great to copy to a temporary directory. 
- List inputFiles = getJobSupport().getInteractInputFiles(); - List inputWorkFiles = new ArrayList<>(inputFiles.size()); - for (File fileInput : inputFiles) + List inputFiles = getJobSupport().getInteractInputFiles(); + List inputWorkFiles = new ArrayList<>(inputFiles.size()); + for (FileLike fileInput : inputFiles) { pepXMLAction.addInput(fileInput, "RawPepXML"); } - List spectraFiles = new ArrayList<>(); + List spectraFiles = new ArrayList<>(); boolean proteinProphetOutput = getJobSupport().isProphetEnabled(); if (!inputFiles.isEmpty()) { try (WorkDirectory.CopyingResource ignored = _wd.ensureCopyingLock()) { - for (File inputFile : inputFiles) + for (FileLike inputFile : inputFiles) inputWorkFiles.add(_wd.inputFile(inputFile, false)); // Always copy spectra files to be local, since PeptideProphet wants them as of TPP 4.6.3 - for (File spectraFile : getJobSupport().getInteractSpectraFiles()) + for (FileLike spectraFile : getJobSupport().getInteractSpectraFiles()) { spectraFiles.add(_wd.inputFile(spectraFile, true)); if (dirMzXml == null) - dirMzXml = spectraFile.getParentFile(); + dirMzXml = spectraFile.getParent(); } } } @@ -351,7 +352,7 @@ public RecordedActionSet run() throws PipelineJobException } } - File fileWorkPepXML = _wd.newFile(FT_PEP_XML); + FileLike fileWorkPepXML = _wd.newFile(FT_PEP_XML); String ver = getTPPVersion(getJob()); List interactCmd = new ArrayList<>(); @@ -444,7 +445,7 @@ public RecordedActionSet run() throws PipelineJobException interactCmd.add("-N" + fileWorkPepXML.getName()); - for (File fileInput : inputWorkFiles) + for (FileLike fileInput : inputWorkFiles) interactCmd.add(_wd.getRelativePath(fileInput)); ProcessBuilder builder = new ProcessBuilder(interactCmd); @@ -488,7 +489,7 @@ public RecordedActionSet run() throws PipelineJobException File realTppModelsFile = new File(PipelineJobService.get().getExecutablePath("tpp_models.pl", null, "tpp", ver, getJob().getLogger())); if (realTppModelsFile.exists()) { - 
_wd.inputFile(realTppModelsFile, true); + _wd.inputFile(FileSystemLike.wrapFile(realTppModelsFile), true); } } catch (FileNotFoundException ignored) @@ -535,17 +536,17 @@ public RecordedActionSet run() throws PipelineJobException try (WorkDirectory.CopyingResource ignored = _wd.ensureCopyingLock()) { - File filePepXML = _wd.outputFile(fileWorkPepXML); + FileLike filePepXML = _wd.outputFile(fileWorkPepXML); // Set up the first step with the right outputs pepXMLAction.addOutput(filePepXML, PEP_XML_INPUT_ROLE, false); - File fileProtXML = null; + FileLike fileProtXML = null; if (proteinProphetOutput) { // If we ran ProteinProphet, set up a step with the right inputs and outputs - File fileWorkProtXML = _wd.newFile(FT_PROT_XML); + FileLike fileWorkProtXML = _wd.newFile(FT_PROT_XML); fileProtXML = _wd.outputFile(fileWorkProtXML, FT_PROT_XML.getDefaultName(getJobSupport().getBaseName())); @@ -559,7 +560,7 @@ public RecordedActionSet run() throws PipelineJobException // we need to deal with them so that we don't complain about unexpected files for (Map.Entry entry : FT_OPTIONAL_AND_IGNORABLES.entrySet()) { - File workFile = _wd.newFile(entry.getKey()); + FileLike workFile = _wd.newFile(entry.getKey()); { // Check if it exists if (!NetworkDrive.exists(workFile)) @@ -570,7 +571,7 @@ public RecordedActionSet run() throws PipelineJobException else { // If so, then grab it and mark as an output - File outputFile = _wd.outputFile(workFile); + FileLike outputFile = _wd.outputFile(workFile); protXMLAction.addOutput(outputFile, entry.getValue(), false); } } @@ -579,7 +580,7 @@ public RecordedActionSet run() throws PipelineJobException if (peptideQuantAction != null) { - for (File file : getJobSupport().getInteractSpectraFiles()) + for (FileLike file : getJobSupport().getInteractSpectraFiles()) { peptideQuantAction.addInput(file, "mzXML"); } @@ -599,8 +600,8 @@ public RecordedActionSet run() throws PipelineJobException if (quantConfigFile != null) { // Rename from the static name 
quantitation.tsv to .libra.tsv - File libraOutputWork = new File(_wd.getDir(), "quantitation.tsv"); - File libraOutput = _wd.outputFile(libraOutputWork, FT_LIBRA_QUANTITATION.getName(_wd.getDir(), getJobSupport().getBaseName())); + FileLike libraOutputWork = _wd.getDir().resolveChild("quantitation.tsv"); + FileLike libraOutput = _wd.outputFile(libraOutputWork, FT_LIBRA_QUANTITATION.getName(_wd.getDir(), getJobSupport().getBaseName())); proteinQuantAction.addOutput(libraOutput, LIBRA_OUTPUT_ROLE, false); } } @@ -612,7 +613,7 @@ public RecordedActionSet run() throws PipelineJobException _wd.discardFile(_wd.newFile(FT_INTERMEDIATE_PROT_SHTML)); // We don't need the extra copy of the spectra files - for (File spectraFile : spectraFiles) + for (FileLike spectraFile : spectraFiles) { _wd.discardFile(spectraFile); } @@ -621,7 +622,7 @@ public RecordedActionSet run() throws PipelineJobException // the raw pepXML file(s). if (!getJobSupport().isFractions() || inputFiles.size() > 1) { - for (File fileInput : inputFiles) + for (FileLike fileInput : inputFiles) { if (!fileInput.delete()) getJob().warn("Failed to delete intermediate file " + fileInput); diff --git a/ms2/src/org/labkey/ms2/pipeline/comet/Comet2014ParamsBuilder.java b/ms2/src/org/labkey/ms2/pipeline/comet/Comet2014ParamsBuilder.java index ae6c92950..7a45ddf32 100644 --- a/ms2/src/org/labkey/ms2/pipeline/comet/Comet2014ParamsBuilder.java +++ b/ms2/src/org/labkey/ms2/pipeline/comet/Comet2014ParamsBuilder.java @@ -35,8 +35,8 @@ import org.labkey.ms2.pipeline.sequest.SequestParams; import org.labkey.ms2.pipeline.sequest.SequestParamsBuilder; import org.labkey.ms2.pipeline.sequest.SequestParamsException; +import org.labkey.vfs.FileLike; -import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -72,7 +72,7 @@ public class Comet2014ParamsBuilder extends SequestParamsBuilder COMET_ENZYME_MAP = Collections.unmodifiableMap(m); } - public Comet2014ParamsBuilder(Map 
sequestInputParams, File sequenceRoot) + public Comet2014ParamsBuilder(Map sequestInputParams, FileLike sequenceRoot) { super(sequestInputParams, sequenceRoot, SequestParams.Variant.comet); } @@ -614,8 +614,6 @@ public String getSequestParamsText() throws SequestParamsException public static class LimitedParseTestCase extends Assert { - private final File _root = new File("fakeroot"); - @Test public void testGenerateFile() throws SequestParamsException { @@ -625,7 +623,7 @@ public void testGenerateFile() throws SequestParamsException paramMap.put(ParameterNames.SEQUENCE_DB, DUMMY_FASTA_NAME); paramMap.put("comet, digest_mass_range", "400.0 5943.0"); paramMap.put("spectrum, parent monoisotopic mass error units", "mmu"); - Comet2014ParamsBuilder spb = new Comet2014ParamsBuilder(paramMap, _root); + Comet2014ParamsBuilder spb = new Comet2014ParamsBuilder(paramMap, AbstractSequestTestCase.ROOT); spb.initXmlValues(); String text = spb.getSequestParamsText(); assertTrue(text.contains("database_name =")); @@ -649,7 +647,7 @@ public void testDecoy() throws SequestParamsException paramMap.put(ParameterNames.SEQUENCE_DB, DUMMY_FASTA_NAME); paramMap.put("comet, decoy_search", "1"); paramMap.put("comet, decoy_prefix", "NEW_PREFIX_"); - Comet2014ParamsBuilder spb = new Comet2014ParamsBuilder(paramMap, _root); + Comet2014ParamsBuilder spb = new Comet2014ParamsBuilder(paramMap, AbstractSequestTestCase.ROOT); spb.initXmlValues(); String text = spb.getSequestParamsText(); assertTrue(text.contains("decoy_search = 1")); @@ -659,14 +657,14 @@ public void testDecoy() throws SequestParamsException @Test public void testEnzymes() throws SequestParamsException { - Comet2014ParamsBuilder spb = new Comet2014ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|{P}"), _root); + Comet2014ParamsBuilder spb = new Comet2014ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|{P}"), AbstractSequestTestCase.ROOT); spb.initEnzymeInfo(); String text = 
spb.getSequestParamsText(); assertTrue(text.contains("search_enzyme_number = 1")); assertTrue(text.contains("sample_enzyme_number = 1")); assertTrue(text.contains("1. Trypsin")); - spb = new Comet2014ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|[X]"), _root); + spb = new Comet2014ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|[X]"), AbstractSequestTestCase.ROOT); spb.initEnzymeInfo(); text = spb.getSequestParamsText(); assertTrue(text.contains("search_enzyme_number = 2")); @@ -680,7 +678,7 @@ public static class FullParseTestCase extends AbstractSequestTestCase @Override public SequestParamsBuilder createParamsBuilder() { - return new Comet2014ParamsBuilder(ip.getInputParameters(), root); + return new Comet2014ParamsBuilder(ip.getInputParameters(), ROOT); } @Test diff --git a/ms2/src/org/labkey/ms2/pipeline/comet/Comet2015ParamsBuilder.java b/ms2/src/org/labkey/ms2/pipeline/comet/Comet2015ParamsBuilder.java index 320c47f52..457f1fe69 100644 --- a/ms2/src/org/labkey/ms2/pipeline/comet/Comet2015ParamsBuilder.java +++ b/ms2/src/org/labkey/ms2/pipeline/comet/Comet2015ParamsBuilder.java @@ -34,8 +34,8 @@ import org.labkey.ms2.pipeline.sequest.SequestParams; import org.labkey.ms2.pipeline.sequest.SequestParamsBuilder; import org.labkey.ms2.pipeline.sequest.SequestParamsException; +import org.labkey.vfs.FileLike; -import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -72,7 +72,7 @@ public class Comet2015ParamsBuilder extends SequestParamsBuilder COMET_ENZYME_MAP = Collections.unmodifiableMap(m); } - public Comet2015ParamsBuilder(Map sequestInputParams, File sequenceRoot) + public Comet2015ParamsBuilder(Map sequestInputParams, FileLike sequenceRoot) { super(sequestInputParams, sequenceRoot, SequestParams.Variant.comet); } @@ -627,8 +627,6 @@ public String getSequestParamsText() throws SequestParamsException public static class LimitedParseTestCase extends Assert { - private 
final File _root = new File("fakeroot"); - @Test public void testGenerateFile() throws SequestParamsException { @@ -638,7 +636,7 @@ public void testGenerateFile() throws SequestParamsException paramMap.put(ParameterNames.SEQUENCE_DB, DUMMY_FASTA_NAME); paramMap.put("comet, digest_mass_range", "400.0 5943.0"); paramMap.put("spectrum, parent monoisotopic mass error units", "mmu"); - Comet2015ParamsBuilder spb = new Comet2015ParamsBuilder(paramMap, _root); + Comet2015ParamsBuilder spb = new Comet2015ParamsBuilder(paramMap, AbstractSequestTestCase.ROOT); spb.initXmlValues(); String text = spb.getSequestParamsText(); assertTrue(text.contains("database_name =")); @@ -662,7 +660,7 @@ public void testDecoy() throws SequestParamsException paramMap.put(ParameterNames.SEQUENCE_DB, DUMMY_FASTA_NAME); paramMap.put("comet, decoy_search", "1"); paramMap.put("comet, decoy_prefix", "NEW_PREFIX_"); - Comet2015ParamsBuilder spb = new Comet2015ParamsBuilder(paramMap, _root); + Comet2015ParamsBuilder spb = new Comet2015ParamsBuilder(paramMap, AbstractSequestTestCase.ROOT); spb.initXmlValues(); String text = spb.getSequestParamsText(); assertTrue(text.contains("decoy_search = 1")); @@ -672,14 +670,14 @@ public void testDecoy() throws SequestParamsException @Test public void testEnzymes() throws SequestParamsException { - Comet2015ParamsBuilder spb = new Comet2015ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|{P}"), _root); + Comet2015ParamsBuilder spb = new Comet2015ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|{P}"), AbstractSequestTestCase.ROOT); spb.initEnzymeInfo(); String text = spb.getSequestParamsText(); assertTrue(text.contains("search_enzyme_number = 1")); assertTrue(text.contains("sample_enzyme_number = 1")); assertTrue(text.contains("1. 
Trypsin")); - spb = new Comet2015ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|[X]"), _root); + spb = new Comet2015ParamsBuilder(Collections.singletonMap(ParameterNames.ENZYME, "[KR]|[X]"), AbstractSequestTestCase.ROOT); spb.initEnzymeInfo(); text = spb.getSequestParamsText(); assertTrue(text.contains("search_enzyme_number = 2")); @@ -693,7 +691,7 @@ public static class FullParseTestCase extends AbstractSequestTestCase @Override public SequestParamsBuilder createParamsBuilder() { - return new Comet2015ParamsBuilder(ip.getInputParameters(), root); + return new Comet2015ParamsBuilder(ip.getInputParameters(), ROOT); } @Test diff --git a/ms2/src/org/labkey/ms2/pipeline/comet/CometPipelineJob.java b/ms2/src/org/labkey/ms2/pipeline/comet/CometPipelineJob.java index 66b3bc6aa..21cbcddac 100644 --- a/ms2/src/org/labkey/ms2/pipeline/comet/CometPipelineJob.java +++ b/ms2/src/org/labkey/ms2/pipeline/comet/CometPipelineJob.java @@ -26,7 +26,6 @@ import org.labkey.ms2.pipeline.sequest.SequestSearchTask; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.List; @@ -39,7 +38,7 @@ public class CometPipelineJob extends AbstractMS2SearchPipelineJob public static final TaskId TASK_ID = new TaskId(CometPipelineJob.class); @JsonCreator - protected CometPipelineJob(@JsonProperty("_dirSequenceRoot") File dirSequenceRoot) + protected CometPipelineJob(@JsonProperty("_dirSequenceRoot") FileLike dirSequenceRoot) { super(dirSequenceRoot); } @@ -76,7 +75,7 @@ public TaskId getTaskPipelineId() } @Override - public File getSearchNativeOutputFile() + public FileLike getSearchNativeOutputFile() { return SequestSearchTask.getNativeOutputFile(getAnalysisDirectory(), getBaseName(), getGZPreference()); } diff --git a/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchProtocol.java b/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchProtocol.java index 5d2afec02..298438232 100644 --- 
a/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchProtocol.java +++ b/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchProtocol.java @@ -19,14 +19,12 @@ import org.labkey.api.data.Container; import org.labkey.api.pipeline.PipeRoot; import org.labkey.api.pipeline.PipelineValidationException; -import org.labkey.api.util.FileUtil; +import org.labkey.api.util.Path; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.ms2.pipeline.AbstractMS2SearchProtocol; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; -import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -64,7 +62,7 @@ public void validate(PipeRoot root) throws PipelineValidationException if(dbNames.isEmpty()) throw new IllegalArgumentException("A sequence database must be selected."); - File fileSequenceDB = FileUtil.appendName(getDirSeqRoot(), dbNames.get(0)); + FileLike fileSequenceDB = getDirSeqRoot().resolveFile(Path.parse(dbNames.get(0))); if (!fileSequenceDB.exists()) throw new IllegalArgumentException("Sequence database '" + dbNames.get(0) + "' is not found in local FASTA root."); diff --git a/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchTask.java b/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchTask.java index 7babf62a9..a5b140b37 100644 --- a/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/comet/CometSearchTask.java @@ -15,7 +15,6 @@ */ package org.labkey.ms2.pipeline.comet; -import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.jetbrains.annotations.NotNull; import org.labkey.api.pipeline.PipelineJob; @@ -30,8 +29,8 @@ import org.labkey.ms2.pipeline.TPPTask; import org.labkey.ms2.pipeline.sequest.AbstractSequestSearchTaskFactory; import org.labkey.ms2.pipeline.sequest.SequestParamsBuilder; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -86,11 +85,11 @@ 
public RecordedActionSet run() throws PipelineJobException try { // Copy the mzXML file to be local - File fileMzXML = _factory.findInputFile(getJob()); - File localMzXML = _wd.inputFile(fileMzXML, true); + FileLike fileMzXML = _factory.findInputFile(getJob()); + FileLike localMzXML = _wd.inputFile(fileMzXML, true); // Write out comet.params file - File fileWorkParams = _wd.newFile(COMET_PARAMS); + FileLike fileWorkParams = _wd.newFile(COMET_PARAMS); // Default to 2015 params format, but allow for older setting String cometVersion = getJob().getParameters().get("comet, version"); @@ -115,11 +114,11 @@ public RecordedActionSet run() throws PipelineJobException getJob().runSubProcess(processBuilder, _wd.getDir()); - File fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), + FileLike fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), getJob().getBaseName(), FileType.gzSupportLevel.NO_GZ); - File pepXMLFile = TPPTask.FT_PEP_XML.getFile(_wd.getDir(), getJob().getBaseName()); + FileLike pepXMLFile = TPPTask.FT_PEP_XML.getFile(_wd.getDir(), getJob().getBaseName()); if (fileWorkPepXMLRaw.exists()) { fileWorkPepXMLRaw.delete(); @@ -131,11 +130,11 @@ public RecordedActionSet run() throws PipelineJobException RecordedAction cometAction = new RecordedAction(COMET_ACTION_NAME); cometAction.addParameter(RecordedAction.COMMAND_LINE_PARAM, StringUtils.join(args, " ")); // Copy to a name that's unique to this file and won't conflict between searches in the same directory - File jobSpecificCometParamsFile = COMET_PARAMS_FILE_TYPE.getFile(fileWorkParams.getParentFile(), getJob().getBaseName()); - FileUtils.moveFile(fileWorkParams, jobSpecificCometParamsFile); + FileLike jobSpecificCometParamsFile = COMET_PARAMS_FILE_TYPE.getFile(fileWorkParams.getParent(), getJob().getBaseName()); + fileWorkParams.move(jobSpecificCometParamsFile); cometAction.addOutput(_wd.outputFile(jobSpecificCometParamsFile), "CometParams", true); 
cometAction.addOutput(_wd.outputFile(fileWorkPepXMLRaw), "RawPepXML", true); - for (File file : getJob().getSequenceFiles()) + for (FileLike file : getJob().getSequenceFiles()) { cometAction.addInput(file, FASTA_INPUT_ROLE); } diff --git a/ms2/src/org/labkey/ms2/pipeline/mascot/MascotPipelineJob.java b/ms2/src/org/labkey/ms2/pipeline/mascot/MascotPipelineJob.java index 94e65d4a0..3d9d320e2 100644 --- a/ms2/src/org/labkey/ms2/pipeline/mascot/MascotPipelineJob.java +++ b/ms2/src/org/labkey/ms2/pipeline/mascot/MascotPipelineJob.java @@ -24,9 +24,7 @@ import org.labkey.ms2.pipeline.AbstractMS2SearchPipelineJob; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; -import java.nio.file.Path; import java.util.List; /** @@ -46,7 +44,7 @@ public class MascotPipelineJob extends AbstractMS2SearchPipelineJob implements M private String _mascotUserPassword; @JsonCreator - protected MascotPipelineJob(@JsonProperty("_dirSequenceRoot") File dirSequenceRoot) + protected MascotPipelineJob(@JsonProperty("_dirSequenceRoot") FileLike dirSequenceRoot) { super(dirSequenceRoot); } @@ -117,13 +115,13 @@ public AbstractFileAnalysisJob createSingleFileJob(FileLike file) } @Override - public File getSearchNativeSpectraFile() + public FileLike getSearchNativeSpectraFile() { return MascotSearchTask.getNativeSpectraFile(getAnalysisDirectory(), getBaseName()); } @Override - public File getSearchNativeOutputFile() + public FileLike getSearchNativeOutputFile() { return MascotSearchTask.getNativeOutputFile(getAnalysisDirectory(), getBaseName()); } diff --git a/ms2/src/org/labkey/ms2/pipeline/mascot/MascotSearchTask.java b/ms2/src/org/labkey/ms2/pipeline/mascot/MascotSearchTask.java index 80fad4d53..7cb8d3dac 100644 --- a/ms2/src/org/labkey/ms2/pipeline/mascot/MascotSearchTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/mascot/MascotSearchTask.java @@ -68,12 +68,12 @@ public class MascotSearchTask extends AbstractMS2SearchTask argsM2S = new ArrayList<>(); String ver = 
TPPTask.getTPPVersion(getJob()); argsM2S.add(PipelineJobService.get().getExecutablePath("MzXML2Search", null, "tpp", ver, getJob().getLogger())); @@ -217,7 +217,7 @@ public RecordedActionSet run() throws PipelineJobException String paramMinPeakCount = params.get("spectrum, minimum peak count"); if (paramMinPeakCount != null) argsM2S.add("-P" + paramMinPeakCount); - argsM2S.add(fileWorkSpectra.getAbsolutePath()); + argsM2S.add(fileWorkSpectra.toNioPathForRead().toFile().getAbsolutePath()); getJob().runSubProcess(new ProcessBuilder(argsM2S), _wd.getDir()); @@ -227,8 +227,8 @@ public RecordedActionSet run() throws PipelineJobException MascotClientImpl mascotClient = new MascotClientImpl(getJobSupport().getMascotServer(), getJob().getLogger(), getJobSupport().getMascotUserAccount(), getJobSupport().getMascotUserPassword()); mascotClient.setProxyURL(getJobSupport().getMascotHTTPProxy()); - int iReturn = mascotClient.search(fileWorkInputXML.getAbsolutePath(), - fileMGF.getAbsolutePath(), fileWorkDAT.getAbsolutePath()); + int iReturn = mascotClient.search(fileWorkInputXML.toNioPathForRead().toFile().getAbsolutePath(), + fileMGF.toNioPathForRead().toFile().getAbsolutePath(), fileWorkDAT.toNioPathForRead().toFile().getAbsolutePath()); if (iReturn != 0) { throw new IOException("Error code " + iReturn + " " + mascotClient.getErrorString()); @@ -277,10 +277,10 @@ else if (exceptionClass.contains("java.io.filenotfoundexception")) long nmascotFileSize = smascotFileSize == null ? -1 : Long.parseLong(smascotFileSize); long nmascotFileTimestamp= smascotFileTimestamp == null ? 
-1 : Long.parseLong(smascotFileTimestamp); - File dirSequenceRoot = getJobSupport().getSequenceRootDirectory(); - File localDB = MS2PipelineManager.getLocalMascotFile(dirSequenceRoot, sequenceDB, sequenceRelease); - File localDBHash = MS2PipelineManager.getLocalMascotFileHash(dirSequenceRoot, sequenceDB, sequenceRelease); - File localDBParent = localDB.getParentFile(); + FileLike dirSequenceRoot = getJobSupport().getSequenceRootDirectory(); + FileLike localDB = MS2PipelineManager.getLocalMascotFile(dirSequenceRoot, sequenceDB, sequenceRelease); + FileLike localDBHash = MS2PipelineManager.getLocalMascotFileHash(dirSequenceRoot, sequenceDB, sequenceRelease); + FileLike localDBParent = localDB.getParent(); FileUtil.mkdirs(localDBParent); long filesize=0; long timestamp=0; @@ -297,7 +297,7 @@ else if (exceptionClass.contains("java.io.filenotfoundexception")) { //c. if local copy exists & cached checking hashes do not match, download new DB and cache new hashes // let's get the hashes - Map hashes=readLocalMascotFileHash(localDBHash.getCanonicalPath()); + Map hashes=readLocalMascotFileHash(localDBHash.toNioPathForRead().toFile().getCanonicalPath()); if (null!=hashes.get("HASH")) { hash=hashes.get("HASH"); @@ -328,24 +328,24 @@ else if (exceptionClass.contains("java.io.filenotfoundexception")) if (toDownloadDB) { getJob().info("Starting download of database "+sequenceRelease+"..."); - mascotClient.downloadDB(localDB.getCanonicalPath(), + mascotClient.downloadDB(localDB.toNioPathForRead().toFile().getCanonicalPath(), sequenceDB, sequenceRelease, smascotFileHash, nmascotFileSize, nmascotFileTimestamp); getJob().info("Database "+sequenceRelease+" downloaded"); getJob().info("Saving its checksums..."); - saveLocalMascotFileHash(localDBHash.getCanonicalPath(), + saveLocalMascotFileHash(localDBHash.toNioPathForRead().toFile().getCanonicalPath(), smascotFileHash, nmascotFileSize, nmascotFileTimestamp); getJob().info("Checksums saved."); } // 2. 
translate Mascot result file to pep.xml format - File fileSequenceDatabase = MS2PipelineManager.getLocalMascotFile(dirSequenceRoot, sequenceDB, sequenceRelease); + FileLike fileSequenceDatabase = MS2PipelineManager.getLocalMascotFile(dirSequenceRoot, sequenceDB, sequenceRelease); String exePath = PipelineJobService.get().getExecutablePath("Mascot2XML", null, "tpp", ver, getJob().getLogger()); String[] args = { exePath, fileWorkDAT.getName(), - "-D" + fileSequenceDatabase.getAbsolutePath(), + "-D" + fileSequenceDatabase.toNioPathForRead().toFile().getAbsolutePath(), "-xml", "-notgz", // don't create the tarball of fake .out and .dta "-desc" @@ -357,14 +357,14 @@ else if (exceptionClass.contains("java.io.filenotfoundexception")) getJob().runSubProcess(new ProcessBuilder(args), _wd.getDir()); PepXMLFileType pepxft = new PepXMLFileType(true); // "true" == accept .xml as valid extension for older converters - File fileOutputPepXML = _wd.newFile(pepxft); - File fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), + FileLike fileOutputPepXML = _wd.newFile(pepxft); + FileLike fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), getJobSupport().getBaseName(), getJobSupport().getGZPreference()); // three possibilities: basename.xml, basename.pep.xml, basename.pep.xml.gz if (fileOutputPepXML.getName().endsWith(".gz")&&!fileWorkPepXMLRaw.getName().endsWith(".gz")) { - fileWorkPepXMLRaw = FileUtil.appendName(fileWorkPepXMLRaw.getParentFile(), fileWorkPepXMLRaw.getName()+".gz"); + fileWorkPepXMLRaw = fileWorkPepXMLRaw.getParent().resolveChild(fileWorkPepXMLRaw.getName()+".gz"); } if (!fileOutputPepXML.renameTo(fileWorkPepXMLRaw)) { @@ -377,7 +377,7 @@ else if (exceptionClass.contains("java.io.filenotfoundexception")) mzxml2SearchAction.addInput(fileMzXML, SPECTRA_INPUT_ROLE); mzxml2SearchAction.addOutput(fileMGF, "MGF", false); - for (File file : getJobSupport().getSequenceFiles()) + for (FileLike file : 
getJobSupport().getSequenceFiles()) { mascotAction.addInput(file, FASTA_INPUT_ROLE); } @@ -400,33 +400,27 @@ else if (exceptionClass.contains("java.io.filenotfoundexception")) } } - private String getSequenceDatabase(File datFile) throws IOException + private String getSequenceDatabase(FileLike datFile) throws IOException { return getMascotResultEntity(datFile, "parameters", "DB"); } - private String getDatabaseRelease(File datFile) throws IOException + private String getDatabaseRelease(FileLike datFile) throws IOException { return getMascotResultEntity(datFile, "header", "release"); } - private String getMascotResultEntity(File datFile, String mimeName, String tag) throws FileNotFoundException + private String getMascotResultEntity(FileLike datFile, String mimeName, String tag) throws FileNotFoundException { - // return the sequence database queried against in this search - final File dat = new File(datFile.getAbsolutePath()); + if (!NetworkDrive.exists(datFile)) + throw new FileNotFoundException(datFile + " not found"); - if (!NetworkDrive.exists(dat)) - throw new FileNotFoundException(datFile.getAbsolutePath() + " not found"); - - InputStream datIn = null; boolean skipParameter = true; String mimeNameSubString = "; name=\""+mimeName+"\""; String tagEqual=tag+"="; String value = null; - try + try (InputStream datIn = datFile.openInputStream()) { - datIn = new FileInputStream(dat); - BufferedReader datReader = new BufferedReader(new InputStreamReader(datIn)); String line; @@ -455,28 +449,20 @@ private String getMascotResultEntity(File datFile, String mimeName, String tag) { // fail to readLine! 
} - finally - { - if (datIn != null) { try { datIn.close(); } catch (IOException e) {} } - } return value; } - private Map readLocalMascotFileHash(String filepath) + private Map readLocalMascotFileHash(String filepath) throws IOException { final File hashFile = new File(filepath); Map returns=new HashMap<>(); if (hashFile.exists()) { - InputStream datIn; - try + try (InputStream datIn = new FileInputStream(hashFile)) { - datIn = new FileInputStream(hashFile); - InputStream in = new BufferedInputStream(datIn); - Properties results=new Properties(); - try + try (InputStream in = new BufferedInputStream(datIn)) { results.load(in); } @@ -484,16 +470,6 @@ private Map readLocalMascotFileHash(String filepath) { getJob().warn("Fail to load database information " + filepath); } - finally - { - try - { - in.close(); - } - catch (IOException e) - { - } - } for(Map.Entry entry: results.entrySet()) { returns.put((String)entry.getKey(),(String)entry.getValue()); @@ -512,11 +488,10 @@ private boolean saveLocalMascotFileHash(String filepath, String hash, long files { Properties hashes = new Properties(); hashes.put(KEY_HASH, hash); - StringBuffer sb; - sb=new StringBuffer(); + StringBuilder sb=new StringBuilder(); sb.append(filesize); hashes.put(KEY_FILESIZE, sb.toString()); - sb=new StringBuffer(); + sb=new StringBuilder(); sb.append(timestamp); hashes.put(KEY_TIMESTAMP, sb.toString()); diff --git a/ms2/src/org/labkey/ms2/pipeline/sequest/AbstractSequestSearchTaskFactory.java b/ms2/src/org/labkey/ms2/pipeline/sequest/AbstractSequestSearchTaskFactory.java index 1bb11fa29..b24e18283 100644 --- a/ms2/src/org/labkey/ms2/pipeline/sequest/AbstractSequestSearchTaskFactory.java +++ b/ms2/src/org/labkey/ms2/pipeline/sequest/AbstractSequestSearchTaskFactory.java @@ -21,6 +21,7 @@ import org.labkey.ms2.pipeline.AbstractMS2SearchPipelineJob; import org.labkey.ms2.pipeline.AbstractMS2SearchTaskFactory; import org.labkey.ms2.pipeline.TPPTask; +import org.labkey.vfs.FileLike; import 
org.springframework.beans.factory.InitializingBean; import java.io.File; @@ -48,7 +49,7 @@ public boolean isJobComplete(PipelineJob job) AbstractMS2SearchPipelineJob support = (AbstractMS2SearchPipelineJob) job; String baseName = support.getBaseName(); String baseNameJoined = support.getJoinedBaseName(); - File dirAnalysis = support.getAnalysisDirectory(); + FileLike dirAnalysis = support.getAnalysisDirectory(); // Fraction roll-up, completely analyzed sample pepXML, or the raw pepXML exist return NetworkDrive.exists(TPPTask.getPepXMLFile(dirAnalysis, baseNameJoined)) || diff --git a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestParamsBuilder.java b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestParamsBuilder.java index c979990ed..21bc18219 100644 --- a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestParamsBuilder.java +++ b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestParamsBuilder.java @@ -25,16 +25,18 @@ import org.labkey.api.module.ModuleLoader; import org.labkey.api.pipeline.ParamParser; import org.labkey.api.pipeline.PipelineJobService; +import org.labkey.api.util.FileUtil; import org.labkey.api.util.JunitUtil; import org.labkey.api.util.Pair; +import org.labkey.api.writer.PrintWriters; import org.labkey.ms2.MS2Module; import org.labkey.ms2.pipeline.AbstractMS2SearchTask; import org.labkey.ms2.pipeline.MS2PipelineManager; import org.labkey.ms2.pipeline.ParameterNames; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; @@ -57,24 +59,24 @@ public abstract class SequestParamsBuilder public static final String DUMMY_FASTA_NAME = "~~~~~~~DUMMY_FASTA_NAME_FOR_TESTING~~~~~~~~~~`````.fasta"; protected Map sequestInputParams; - File sequenceRoot; + FileLike sequenceRoot; char[] _validResidues = {'A', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 
'V', 'W', 'Y', 'X', 'B', 'Z', 'O','[',']'}; protected HashMap supportedEnzymes = new HashMap<>(); protected final AbstractSequestParams _params; protected final AbstractSequestParams.Variant _variant; - private final List _databaseFiles; + private final List _databaseFiles; - public SequestParamsBuilder(Map sequestInputParams, File sequenceRoot) + public SequestParamsBuilder(Map sequestInputParams, FileLike sequenceRoot) { this(sequestInputParams, sequenceRoot, SequestParams.Variant.thermosequest); } - public SequestParamsBuilder(Map sequestInputParams, File sequenceRoot, SequestParams.Variant variant) + public SequestParamsBuilder(Map sequestInputParams, FileLike sequenceRoot, SequestParams.Variant variant) { this(sequestInputParams, sequenceRoot, variant, null); } - public SequestParamsBuilder(Map sequestInputParams, File sequenceRoot, SequestParams.Variant variant, List databaseFiles) + public SequestParamsBuilder(Map sequestInputParams, FileLike sequenceRoot, SequestParams.Variant variant, List databaseFiles) { _variant = variant; _params = createSequestParams(variant); @@ -149,7 +151,7 @@ public char[] getValidResidues() protected List initDatabases() { - List databaseFiles = _databaseFiles; + List databaseFiles = _databaseFiles; if (databaseFiles == null) { databaseFiles = new ArrayList<>(); @@ -171,12 +173,12 @@ protected List initDatabases() } Param database1 = _params.getFASTAParam(); - File databaseFile = databaseFiles.get(0); + FileLike databaseFile = databaseFiles.get(0); if (!databaseFile.exists() && !DUMMY_FASTA_NAME.equals(databaseFile.getName())) { return Collections.singletonList("pipeline, database; The database does not exist(" + databaseFile + ")"); } - database1.setValue(databaseFile.getAbsolutePath()); + database1.setValue(databaseFile.toNioPathForRead().toFile().getAbsolutePath()); if (databaseFiles.size() > 1) { @@ -189,7 +191,7 @@ protected List initDatabases() { return Collections.singletonList("pipeline, database; The database does not 
exist(" + databaseFile + ")"); } - database2.setValue(databaseFile.getAbsolutePath()); + database2.setValue(databaseFile.toNioPathForRead().toFile().getAbsolutePath()); } } return Collections.emptyList(); @@ -217,12 +219,12 @@ protected List initPeptideMassTolerance() return Collections.emptyList(); } } - if (plusValueString == null || minusValueString == null || !plusValueString.equals(minusValueString)) + if (plusValueString == null || !plusValueString.equals(minusValueString)) { return Collections.singletonList("Sequest does not support asymmetric parent error ranges (minus=" + minusValueString + " plus=" + plusValueString + ")."); } - if (plusValueString.isEmpty() && minusValueString.isEmpty()) + if (plusValueString.isEmpty()) { return Collections.singletonList("No values were entered for spectrum, parent monoisotopic mass error minus/plus."); } @@ -850,100 +852,6 @@ public boolean isValidResidue(String residueString) } return true; } - //The Sequest2xml uses an older version of the sequest.params file(version = 1)supported sequest uses version = 2; - String lookUpEnzyme(String enzyme) - { - char bracket2a = '{'; - char bracket2b = '}'; - int offset = 0; - CharSequence cutSites; - CharSequence blockSites; - - try - { - cutSites = enzyme.subSequence(enzyme.indexOf('[') + 1, enzyme.indexOf(']')); - } - catch (IndexOutOfBoundsException e) - { - cutSites = new StringBuilder(); - } - if (enzyme.lastIndexOf('[') != enzyme.indexOf('[')) - { - bracket2a = '['; - bracket2b = ']'; - offset = enzyme.indexOf(']') + 1; - } - - try - { - int startIndex = enzyme.indexOf(bracket2a, offset) + 1; - int endIndex = enzyme.indexOf(bracket2b, offset); - blockSites = enzyme.substring(startIndex, endIndex); - } - catch (IndexOutOfBoundsException e) - { - blockSites = new StringBuilder(); - } - - Set supportedEnzymesKes = supportedEnzymes.keySet(); - boolean matches = false; - for (String lookUp : supportedEnzymesKes) - { - String lookUpBlocks; - String lookUpCuts; - - try - { - 
lookUpCuts = lookUp.substring(lookUp.indexOf('[') + 1, lookUp.indexOf(']')); - } - catch (IndexOutOfBoundsException e) - { - lookUpCuts = ""; - } - - - try - { - int startIndex = lookUp.indexOf(bracket2a, offset) + 1; - int endIndex = lookUp.indexOf(bracket2b, offset); - lookUpBlocks = lookUp.substring(startIndex, endIndex); - } - catch (IndexOutOfBoundsException e) - { - lookUpBlocks = ""; - } - - if (lookUpCuts.length() == cutSites.length()) - { - matches = true; - for (int i = 0; i < cutSites.length(); i++) - { - if (lookUpCuts.indexOf(cutSites.charAt(i)) < 0) - { - matches = false; - } - } - if (matches && - lookUpBlocks.length() == blockSites.length()) - { - if (blockSites.isEmpty()) break; - for (int i = 0; i < blockSites.length(); i++) - { - if (lookUpBlocks.indexOf(blockSites.charAt(i)) < 0) - { - matches = false; - } - } - } - else - { - matches = false; - } - } - if (matches) return supportedEnzymes.get(lookUp); - } - return null; - } //Used with JUnit public AbstractSequestParams getProperties() @@ -1046,14 +954,27 @@ public abstract static class AbstractSequestTestCase extends Assert protected SequestParamsBuilder spb; protected ParamParser ip; protected String dbPath; - protected File root; + public static final FileLike ROOT; + + static + { + FileLike root; + try + { + root = FileSystemLike.wrapFile(JunitUtil.getSampleData(ModuleLoader.getInstance().getModule(MS2Module.class), "xarfiles/ms2pipe/databases")); + } + catch (IOException e) + { + root = null; + } + ROOT = root; + } @Before public void setUp() throws Exception { ip = PipelineJobService.get().createParamParser(); - root = JunitUtil.getSampleData(ModuleLoader.getInstance().getModule(MS2Module.class), "xarfiles/ms2pipe/databases"); - dbPath = root.getCanonicalPath(); + dbPath = FileUtil.getAbsoluteCaseSensitiveFile(ROOT.toNioPathForRead().toFile()).getAbsolutePath(); spb = createParamsBuilder(); } @@ -1078,9 +999,9 @@ public void parseParams(String xml) public abstract SequestParamsBuilder 
createParamsBuilder(); } - public void writeFile(File output) throws SequestParamsException + public void writeFile(FileLike output) throws SequestParamsException { - try (BufferedWriter writer = new BufferedWriter(new FileWriter(output))) + try (BufferedWriter writer = new BufferedWriter(PrintWriters.getPrintWriter(output.openOutputStream()))) { writer.write(getSequestParamsText()); } diff --git a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestPipelineJob.java b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestPipelineJob.java index 1bb8af055..ca3f45a5f 100644 --- a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestPipelineJob.java +++ b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestPipelineJob.java @@ -23,7 +23,6 @@ import org.labkey.ms2.pipeline.AbstractMS2SearchPipelineJob; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.List; @@ -39,7 +38,7 @@ public class SequestPipelineJob extends AbstractMS2SearchPipelineJob public static final TaskId TASK_ID = new TaskId(SequestPipelineJob.class); @JsonCreator - protected SequestPipelineJob(@JsonProperty("_dirSequenceRoot") File dirSequenceRoot) + protected SequestPipelineJob(@JsonProperty("_dirSequenceRoot") FileLike dirSequenceRoot) { super(dirSequenceRoot); } @@ -82,7 +81,7 @@ public boolean isRefreshRequired() } @Override - public File getSearchNativeOutputFile() + public FileLike getSearchNativeOutputFile() { return SequestSearchTask.getNativeOutputFile(getAnalysisDirectory(), getBaseName(), getGZPreference()); } diff --git a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchProtocol.java b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchProtocol.java index 3a188a673..2e4a9680a 100644 --- a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchProtocol.java +++ b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchProtocol.java @@ -20,14 +20,11 @@ import org.labkey.api.data.Container; import org.labkey.api.pipeline.PipeRoot; import 
org.labkey.api.pipeline.PipelineValidationException; -import org.labkey.api.util.FileUtil; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.ms2.pipeline.AbstractMS2SearchProtocol; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; -import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -66,7 +63,7 @@ public void validate(PipeRoot root) throws PipelineValidationException if(dbNames.isEmpty()) throw new IllegalArgumentException("A sequence database must be selected."); - File fileSequenceDB = FileUtil.appendPath(getDirSeqRoot(), org.labkey.api.util.Path.parse(dbNames.get(0))); + FileLike fileSequenceDB = getDirSeqRoot().resolveFile(org.labkey.api.util.Path.parse(dbNames.get(0))); if (!fileSequenceDB.exists()) throw new IllegalArgumentException("Sequence database '" + dbNames.get(0) + "' is not found in local FASTA root."); diff --git a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchTask.java b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchTask.java index 3c15f3331..0963ecefd 100644 --- a/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/sequest/SequestSearchTask.java @@ -29,12 +29,16 @@ import org.labkey.api.util.FileType; import org.labkey.api.util.FileUtil; import org.labkey.api.util.GUID; +import org.labkey.api.util.Path; import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.writer.PrintWriters; import org.labkey.ms2.pipeline.AbstractMS2SearchPipelineJob; import org.labkey.ms2.pipeline.AbstractMS2SearchProtocol; import org.labkey.ms2.pipeline.AbstractMS2SearchTask; import org.labkey.ms2.pipeline.TPPTask; import org.labkey.ms2.pipeline.ParameterNames; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.BufferedReader; import java.io.BufferedWriter; @@ -81,7 +85,7 @@ public class SequestSearchTask extends AbstractMS2SearchTask params = getJob().getParameters(); String indexFileName = 
params.get(INDEX_FILE_NAME_PARAMETER_NAME); @@ -141,9 +145,9 @@ private File getIndexFileWithoutExtension() throws PipelineJobException ".StaticMod-" + params.get(ParameterNames.STATIC_MOD) + ".FASTAModified-" + - fastaFile.lastModified() + + fastaFile.getLastModified() + ".FASTASize-" + - fastaFile.length(); + fastaFile.getSize(); CRC32 crc = new CRC32(); crc.update(toBytes(sb)); @@ -151,15 +155,15 @@ private File getIndexFileWithoutExtension() throws PipelineJobException indexFileName = fastaFile.getName() + "_" + crc.getValue(); } - String relativeDirPath = FileUtil.relativePath(fastaFile.getParentFile().getPath(), fastaRoot.getPath()); - File indexDir; + String relativeDirPath = FileUtil.relativePath(fastaFile.getParent().toNioPathForRead().toFile().getPath(), fastaRoot.toNioPathForRead().toFile().getPath()); + FileLike indexDir; if (_factory.getIndexRootDir() == null) { - indexDir = new File(new File(fastaRoot, relativeDirPath), "index"); + indexDir = fastaRoot.resolveFile(Path.parse(relativeDirPath)).resolveChild("index"); } else { - indexDir = new File(new File(_factory.getIndexRootDir()), relativeDirPath); + indexDir = FileSystemLike.wrapFile(new File(new File(_factory.getIndexRootDir()), relativeDirPath)); } try { @@ -174,7 +178,7 @@ private File getIndexFileWithoutExtension() throws PipelineJobException throw new PipelineJobException("Failed to create index directory " + indexDir); } - return new File(indexDir, indexFileName); + return indexDir.resolveChild(indexFileName); } private static byte[] toBytes(String s) @@ -189,27 +193,27 @@ private boolean usesIndex() return "true".equalsIgnoreCase(indexUsage) || "1".equalsIgnoreCase(indexUsage) || "yes".equalsIgnoreCase(indexUsage); } - private List getFASTAOrIndexFiles(List actions) throws PipelineJobException + private List getFASTAOrIndexFiles(List actions) throws PipelineJobException { if (!usesIndex()) { - return Arrays.asList(getJob().getSequenceFiles()); + return getJob().getSequenceFiles(); } - File 
indexFileBase = getIndexFileWithoutExtension(); - File indexFile = new File(indexFileBase.getParentFile(), indexFileBase.getName() + INDEX_FILE_TYPE.getDefaultSuffix()); + FileLike indexFileBase = getIndexFileWithoutExtension(); + FileLike indexFile = indexFileBase.getParent().resolveChild(indexFileBase.getName() + INDEX_FILE_TYPE.getDefaultSuffix()); synchronized (INDEX_LOCK) { if (!indexFile.exists()) { - assert getJob().getSequenceFiles().length == 1 : "Only one FASTA is supported when using indices"; + assert getJob().getSequenceFiles().size() == 1 : "Only one FASTA is supported when using indices"; getJob().setStatus("CREATING FASTA INDEX"); - getJob().info("Creating a FASTA index for " + getJob().getSequenceFiles()[0] + " as " + indexFileBase); + getJob().info("Creating a FASTA index for " + getJob().getSequenceFiles().get(0) + " as " + indexFileBase); // Create a makedb.params to control the index creation - File fileWorkParams = _wd.newFile(MAKE_DB_PARAMS); + FileLike fileWorkParams = _wd.newFile(MAKE_DB_PARAMS); SequestParamsBuilder builder = new ThermoSequestParamsBuilder(getJob().getParameters(), getJob().getSequenceRootDirectory(), SequestParams.Variant.makedb, null); builder.initXmlValues(); builder.writeFile(fileWorkParams); @@ -219,15 +223,15 @@ private List getFASTAOrIndexFiles(List actions) throws Pip File makeDBExecutable = FileUtil.appendName(_factory.getSequestInstallDirAsFile(), "makedb"); args.add(makeDBExecutable.getAbsolutePath()); args.add("-O" + indexFileBase); - args.add("-P" + fileWorkParams.getAbsolutePath()); + args.add("-P" + fileWorkParams.toNioPathForRead().toFile().getAbsolutePath()); ProcessBuilder pb = new ProcessBuilder(args); // In order to find sort.exe, use the Sequest directory as the working directory File dir = makeDBExecutable.getParentFile(); - getJob().runSubProcess(pb, dir); + getJob().runSubProcess(pb, FileSystemLike.wrapFile(dir)); RecordedAction action = new RecordedAction(MAKEDB_ACTION_NAME); - 
action.addInput(getJob().getSequenceFiles()[0], "FASTA"); + action.addInput(getJob().getSequenceFiles().get(0), "FASTA"); action.addInput(fileWorkParams, "MakeDB Params"); action.addOutput(indexFile, "FASTA Index", false); action.addParameter(RecordedAction.COMMAND_LINE_PARAM, StringUtils.join(args, " ")); @@ -264,26 +268,26 @@ public RecordedActionSet run() throws PipelineJobException params.put("search, useremail", params.get(PipelineJob.PIPELINE_USERNAME_PARAM)); params.put("search, username", "CPAS User"); - List sequenceFiles = getFASTAOrIndexFiles(actions); + List sequenceFiles = getFASTAOrIndexFiles(actions); // Don't let the total path name get too long. The actual name doesn't matter much, but we need // to avoid collisions so we can't just truncate the path after n characters boolean useGUIDFilename = getJob().getBaseName().length() > 20; String dtaDirName = useGUIDFilename ? GUID.makeGUID() : getJob().getBaseName(); - File dirOutputDta = new File(_wd.getDir(), dtaDirName); - File fileMzXML = _factory.findInputFile(getJob()); + FileLike dirOutputDta = _wd.getDir().resolveChild(dtaDirName); + FileLike fileMzXML = _factory.findInputFile(getJob()); String tppVersion = TPPTask.getTPPVersion(getJob()); // out2xml will need the mzXML file in the parent directory of the DTA directory in order to look up // retention times, so make a copy in the right place - File localMzXML = _wd.inputFile(fileMzXML, true); + FileLike localMzXML = _wd.inputFile(fileMzXML, true); // Translate the mzXML file to dta using MzXML2Search convertToDTA(params, dirOutputDta, localMzXML, tppVersion, actions); - File dtaListFile = writeDtaList(dirOutputDta); + FileLike dtaListFile = writeDtaList(dirOutputDta); // Write out sequest.params file - File fileWorkParams = _wd.newFile(SEQUEST_PARAMS); + FileLike fileWorkParams = _wd.newFile(SEQUEST_PARAMS); SequestParamsBuilder builder = new ThermoSequestParamsBuilder(params, getJob().getSequenceRootDirectory(), 
SequestParams.Variant.thermosequest, sequenceFiles); builder.initXmlValues(); @@ -291,19 +295,19 @@ public RecordedActionSet run() throws PipelineJobException // Have a copy in both the work directory to retain with the results, and in the dta subdirectory for // Sequest to use - FileUtils.copyFileToDirectory(fileWorkParams, dirOutputDta); + FileUtil.copyFile(fileWorkParams, dirOutputDta.resolveChild(fileWorkParams.getName())); // Perform Sequest search List sequestArgs = new ArrayList<>(); File sequestExecutable = FileUtil.appendName(_factory.getSequestInstallDirAsFile(), "sequest"); sequestArgs.add(sequestExecutable.getAbsolutePath()); sequestArgs.addAll(_factory.getSequestOptions()); - sequestArgs.add("-R" + dtaListFile.getAbsolutePath()); - sequestArgs.add("-F" + dirOutputDta.getAbsolutePath()); + sequestArgs.add("-R" + dtaListFile.toNioPathForRead().toFile().getAbsolutePath()); + sequestArgs.add("-F" + dirOutputDta.toNioPathForRead().toFile().getAbsolutePath()); // Trailing argument that makes Sequest not barf sequestArgs.add("x"); ProcessBuilder sequestPB = new ProcessBuilder(sequestArgs); - File sequestLogFileWork = SEQUEST_LOG_FILE_TYPE.getFile(_wd.getDir(), getJob().getBaseName()); + FileLike sequestLogFileWork = SEQUEST_LOG_FILE_TYPE.getFile(_wd.getDir(), getJob().getBaseName()); _wd.newFile(sequestLogFileWork.getName()); boolean copySequestLogFile = true; try @@ -312,7 +316,7 @@ public RecordedActionSet run() throws PipelineJobException // out2xml assumes that the mzXML file base name will match the DTA directory name, so rename the file // temporarily - File guidMzXMLFile = FileUtil.appendName(localMzXML.getParentFile(), AbstractMS2SearchProtocol.FT_MZXML.getDefaultName(dtaDirName)); + FileLike guidMzXMLFile = localMzXML.getParent().resolveChild(AbstractMS2SearchProtocol.FT_MZXML.getDefaultName(dtaDirName)); if (useGUIDFilename) { localMzXML.renameTo(guidMzXMLFile); @@ -339,12 +343,12 @@ public RecordedActionSet run() throws PipelineJobException 
guidMzXMLFile.renameTo(localMzXML); } - File pepXmlFile = TPPTask.getPepXMLFile(_wd.getDir(), getJob().getBaseName()); + FileLike pepXmlFile = TPPTask.getPepXMLFile(_wd.getDir(), getJob().getBaseName()); if (!pepXmlFile.exists()) { // If we used an alternative name to keep the path from getting too long, rename the resulting pepXML // to match our standard convention - File altPepXmlFile = TPPTask.getPepXMLFile(_wd.getDir(), dtaDirName); + FileLike altPepXmlFile = TPPTask.getPepXMLFile(_wd.getDir(), dtaDirName); if (altPepXmlFile.exists()) { altPepXmlFile.renameTo(pepXmlFile); @@ -356,9 +360,9 @@ public RecordedActionSet run() throws PipelineJobException FileUtil.deleteDir(dirOutputDta); if (dirOutputDta.exists()) - throw new IOException("Failed to delete DTA directory " + dirOutputDta.getAbsolutePath()); + throw new IOException("Failed to delete DTA directory " + dirOutputDta); - File fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), + FileLike fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), getJob().getBaseName(), getJob().getGZPreference()); @@ -369,8 +373,8 @@ public RecordedActionSet run() throws PipelineJobException { assert sequenceFiles.size() == 1; // We want the pepXML file to point at the FASTA file, not at the indexed copy - String indexPath = sequenceFiles.get(0).getAbsolutePath(); - String fastaPath = getJob().getSequenceFiles()[0].getAbsolutePath(); + String indexPath = sequenceFiles.get(0).toNioPathForRead().toFile().getAbsolutePath(); + String fastaPath = getJob().getSequenceFiles().get(0).toNioPathForRead().toFile().getAbsolutePath(); replacements.put(indexPath, fastaPath); getJob().info("Replacing index path (" + indexPath + ") with FASTA path (" + fastaPath + ")"); } @@ -403,12 +407,12 @@ public RecordedActionSet run() throws PipelineJobException RecordedAction sequestAction = new RecordedAction(SEQUEST_ACTION_NAME); 
sequestAction.addParameter(RecordedAction.COMMAND_LINE_PARAM, StringUtils.join(sequestArgs, " ")); // Copy to a name that's unique to this file and won't conflict between searches in the same directory - File jobSpecificSequestParamsFile = SEQUEST_PARAMS_FILE_TYPE.getFile(fileWorkParams.getParentFile(), getJob().getBaseName()); - FileUtils.moveFile(fileWorkParams, jobSpecificSequestParamsFile); + FileLike jobSpecificSequestParamsFile = SEQUEST_PARAMS_FILE_TYPE.getFile(fileWorkParams.getParent(), getJob().getBaseName()); + fileWorkParams.move(jobSpecificSequestParamsFile); sequestAction.addOutput(_wd.outputFile(jobSpecificSequestParamsFile), "SequestParams", true); sequestAction.addOutput(_wd.outputFile(fileWorkPepXMLRaw), "RawPepXML", true); sequestAction.addOutput(_wd.outputFile(sequestLogFileWork), "SequestLog", false); - for (File file : sequenceFiles) + for (FileLike file : sequenceFiles) { sequestAction.addInput(file, FASTA_INPUT_ROLE); } @@ -439,11 +443,11 @@ public RecordedActionSet run() throws PipelineJobException * Rewrite the pepXML file so that it points to the FASTA file instead of the index file because the TPP and * the MS2 loading code don't know how to parse the index files. 
*/ - private void rewritePepXML(File fileWorkPepXMLRaw, File pepXmlFile, Map substitutions) throws PipelineJobException + private void rewritePepXML(FileLike fileWorkPepXMLRaw, FileLike pepXmlFile, Map substitutions) throws PipelineJobException, IOException { - try (InputStream fIn = new FileInputStream(pepXmlFile); + try (InputStream fIn = pepXmlFile.openInputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(fIn)); - OutputStream fOut = new FileOutputStream(fileWorkPepXMLRaw); + OutputStream fOut = fileWorkPepXMLRaw.openOutputStream(); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fOut))) { String line; @@ -469,7 +473,7 @@ private void rewritePepXML(File fileWorkPepXMLRaw, File pepXmlFile, Map params, File dirOutputDta, File fileMzXML, String tppVersion, List actions) + private void convertToDTA(Map params, FileLike dirOutputDta, FileLike fileMzXML, String tppVersion, List actions) throws IOException, PipelineJobException { if (!FileUtil.mkdir(dirOutputDta)) @@ -481,7 +485,7 @@ private void convertToDTA(Map params, File dirOutputDta, File fi Mzxml2SearchParams mzXml2SearchParams = new Mzxml2SearchParams(); Collection inputXmlParams = convertParams(mzXml2SearchParams.getParams(), params); mzXML2SearchArgs.addAll(inputXmlParams); - mzXML2SearchArgs.add(fileMzXML.getAbsolutePath()); + mzXML2SearchArgs.add(fileMzXML.toNioPathForRead().toFile().getAbsolutePath()); RecordedAction action = new RecordedAction(MZXML2SEARCH_ACTION_NAME); action.addParameter(RecordedAction.COMMAND_LINE_PARAM, StringUtils.join(mzXML2SearchArgs, " ")); @@ -493,14 +497,14 @@ private void convertToDTA(Map params, File dirOutputDta, File fi getJob().runSubProcess(new ProcessBuilder(mzXML2SearchArgs), _wd.getDir()); } - private File writeDtaList(File dirOutputDta) throws IOException + private FileLike writeDtaList(FileLike dirOutputDta) throws IOException { - File[] dtaFiles = dirOutputDta.listFiles((dir, name) -> name.toLowerCase().endsWith(".dta")); - 
File result = new File(dirOutputDta, "DtaFiles.txt"); - try (OutputStream out = new FileOutputStream(result); - PrintWriter writer = new PrintWriter(out)) + List dtaFiles = dirOutputDta.getChildren(f -> f.getName().toLowerCase().endsWith(".dta")); + FileLike result = dirOutputDta.resolveChild("DtaFiles.txt"); + try (OutputStream out = result.openOutputStream(); + PrintWriter writer = PrintWriters.getPrintWriter(out)) { - for (File dtaFile : dtaFiles) + for (FileLike dtaFile : dtaFiles) { writer.println(dtaFile.getName()); } diff --git a/ms2/src/org/labkey/ms2/pipeline/sequest/ThermoSequestParamsBuilder.java b/ms2/src/org/labkey/ms2/pipeline/sequest/ThermoSequestParamsBuilder.java index 9a96032e9..e32b740b6 100644 --- a/ms2/src/org/labkey/ms2/pipeline/sequest/ThermoSequestParamsBuilder.java +++ b/ms2/src/org/labkey/ms2/pipeline/sequest/ThermoSequestParamsBuilder.java @@ -16,10 +16,11 @@ package org.labkey.ms2.pipeline.sequest; import org.junit.Test; +import org.labkey.api.util.FileUtil; import org.labkey.ms2.pipeline.ParameterNames; +import org.labkey.vfs.FileLike; import java.io.File; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -33,12 +34,12 @@ */ public class ThermoSequestParamsBuilder extends SequestParamsBuilder { - public ThermoSequestParamsBuilder(Map sequestInputParams, File sequenceRoot) + public ThermoSequestParamsBuilder(Map sequestInputParams, FileLike sequenceRoot) { super(sequestInputParams, sequenceRoot); } - public ThermoSequestParamsBuilder(Map sequestInputParams, File sequenceRoot, SequestParams.Variant variant, List databaseFiles) + public ThermoSequestParamsBuilder(Map sequestInputParams, FileLike sequenceRoot, SequestParams.Variant variant, List databaseFiles) { super(sequestInputParams, sequenceRoot, variant, databaseFiles); } @@ -231,11 +232,11 @@ public static class TestCase extends AbstractSequestTestCase @Override public SequestParamsBuilder createParamsBuilder() { - 
return new ThermoSequestParamsBuilder(ip.getInputParameters(), root); + return new ThermoSequestParamsBuilder(ip.getInputParameters(), ROOT); } @Test - public void testInitDatabasesNormal() throws IOException + public void testInitDatabasesNormal() { String value = "Bovine_mini1.fasta"; parseParams("" + @@ -246,7 +247,7 @@ public void testInitDatabasesNormal() throws IOException List parserError = spb.initDatabases(); if (!parserError.isEmpty()) fail(parserError); Param sp = spb.getProperties().getFASTAParam(); - assertEquals(new File(dbPath + File.separator + value).getCanonicalPath(), new File(sp.getValue()).getCanonicalPath()); + assertEquals(FileUtil.getAbsoluteCaseSensitiveFile(new File(dbPath + File.separator + value)).getAbsolutePath(), FileUtil.getAbsoluteCaseSensitiveFile(new File(sp.getValue())).getAbsolutePath()); } @Test @@ -1543,7 +1544,7 @@ public void testGenerateFile() throws SequestParamsException paramMap.put(ParameterNames.SEQUENCE_DB, DUMMY_FASTA_NAME); // Value from UW version - make sure it doesn't get piped through paramMap.put("sequest, digest_mass_range", "400.0 5900.0"); - ThermoSequestParamsBuilder spb = new ThermoSequestParamsBuilder(paramMap, new File("fakeroot")); + ThermoSequestParamsBuilder spb = new ThermoSequestParamsBuilder(paramMap, AbstractSequestTestCase.ROOT); spb.initXmlValues(); String text = spb.getSequestParamsText(); assertTrue(text.contains("database_name =")); diff --git a/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemPipelineJob.java b/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemPipelineJob.java index 00e65a8f2..faf3c8cac 100644 --- a/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemPipelineJob.java +++ b/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemPipelineJob.java @@ -24,7 +24,6 @@ import org.labkey.ms2.pipeline.AbstractMS2SearchPipelineJob; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.List; @@ -47,7 +46,7 @@ public Logger getClassLogger() } @JsonCreator - 
protected XTandemPipelineJob(@JsonProperty("_dirSequenceRoot") File dirSequenceRoot) + protected XTandemPipelineJob(@JsonProperty("_dirSequenceRoot") FileLike dirSequenceRoot) { super(dirSequenceRoot); } @@ -95,9 +94,9 @@ public boolean isProphetEnabled() "k-score".equals(paramScore)); } - // if fire does not exist, will append .gz if config indicates preference for gzipped outputs + // if file does not exist, will append .gz if config indicates preference for gzipped outputs @Override - public File getSearchNativeOutputFile() + public FileLike getSearchNativeOutputFile() { return XTandemSearchTask.getNativeOutputFile(getAnalysisDirectory(), getBaseName(), getGZPreference()); } diff --git a/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemSearchTask.java b/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemSearchTask.java index 4faccac6a..c9ade6b3a 100644 --- a/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemSearchTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemSearchTask.java @@ -25,11 +25,13 @@ import org.labkey.api.pipeline.WorkDirectory; import org.labkey.api.util.FileType; import org.labkey.api.util.NetworkDrive; +import org.labkey.api.writer.PrintWriters; import org.labkey.ms2.pipeline.AbstractMS2SearchPipelineJob; import org.labkey.ms2.pipeline.AbstractMS2SearchTask; import org.labkey.ms2.pipeline.AbstractMS2SearchTaskFactory; import org.labkey.ms2.pipeline.MS2SearchJobSupport; import org.labkey.ms2.pipeline.TPPTask; +import org.labkey.vfs.FileLike; import java.io.BufferedWriter; import java.io.File; @@ -63,7 +65,7 @@ public static FileType getNativeFileType(FileType.gzSupportLevel gzSupport) return new FileType(".xtan.xml", gzSupport); } // useful for naming an output file while honoring config preference for gzip output - public static File getNativeOutputFile(File dirAnalysis, String baseName, + public static FileLike getNativeOutputFile(FileLike dirAnalysis, String baseName, FileType.gzSupportLevel gzSupport) { return 
getNativeFileType(gzSupport).newFile(dirAnalysis, baseName); @@ -95,7 +97,7 @@ public boolean isJobComplete(PipelineJob job) { JobSupport support = (JobSupport) job; String baseName = support.getBaseName(); - File dirAnalysis = support.getAnalysisDirectory(); + FileLike dirAnalysis = support.getAnalysisDirectory(); // X! Tandem native output if (!NetworkDrive.exists(getNativeOutputFile(dirAnalysis, baseName, FileType.gzSupportLevel.SUPPORT_GZ))) @@ -144,13 +146,13 @@ public RecordedActionSet run() throws PipelineJobException // Avoid re-running an X! Tandem search, if the .xtan.xml already exists. // Several labs soft-link or copy .xtan.xml files to reduce processing time. ProcessBuilder xTandemPB = null; - File fileOutputXML = getNativeFileType(support.getGZPreference()).newFile(support.getAnalysisDirectory(), baseName); - File fileWorkOutputXML = null; - File fileJobTandemXML = null; + FileLike fileOutputXML = getNativeFileType(support.getGZPreference()).newFile(support.getAnalysisDirectory(), baseName); + FileLike fileWorkOutputXML = null; + FileLike fileJobTandemXML = null; boolean searchComplete = NetworkDrive.exists(fileOutputXML); - File fileMzXML = _factory.findInputFile(getJobSupport()); - File fileInputSpectra; + FileLike fileMzXML = _factory.findInputFile(getJobSupport()); + FileLike fileInputSpectra; try (WorkDirectory.CopyingResource lock = _wd.ensureCopyingLock()) { fileInputSpectra = _wd.inputFile(fileMzXML, false); @@ -161,14 +163,14 @@ public RecordedActionSet run() throws PipelineJobException if (!searchComplete) { fileWorkOutputXML = _wd.newFile(getNativeFileType(support.getGZPreference())); - File fileWorkParameters = _wd.newFile(INPUT_XML); - File fileWorkTaxonomy = _wd.newFile(TAXONOMY_XML); + FileLike fileWorkParameters = _wd.newFile(INPUT_XML); + FileLike fileWorkTaxonomy = _wd.newFile(TAXONOMY_XML); // CONSIDER: If the file stays in its original location, the absolute path // is used, to ensure the loader can find it. Better way? 
String pathSpectra; if (fileInputSpectra.equals(fileMzXML)) - pathSpectra = fileInputSpectra.getAbsolutePath(); + pathSpectra = fileInputSpectra.toNioPathForRead().toFile().getAbsolutePath(); else pathSpectra = _wd.getRelativePath(fileInputSpectra); @@ -198,7 +200,7 @@ public RecordedActionSet run() throws PipelineJobException _wd.discardFile(fileWorkTaxonomy); } - File fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), baseName, support.getGZPreference()); + FileLike fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), baseName, support.getGZPreference()); String ver = TPPTask.getTPPVersion(getJob()); String exePath = PipelineJobService.get().getExecutablePath("Tandem2XML", null, "tpp", ver, getJob().getLogger()); @@ -209,7 +211,7 @@ public RecordedActionSet run() throws PipelineJobException _wd.getDir()); // Move final outputs to analysis directory. - File filePepXMLRaw; + FileLike filePepXMLRaw; try (WorkDirectory.CopyingResource lock = _wd.ensureCopyingLock()) { if (!searchComplete) @@ -223,7 +225,7 @@ public RecordedActionSet run() throws PipelineJobException RecordedAction xtandemAction = new RecordedAction(X_TANDEM_ACTION_NAME); xtandemAction.addParameter(RecordedAction.COMMAND_LINE_PARAM, StringUtils.join(xTandemPB.command(), ' ')); xtandemAction.addInput(fileMzXML, SPECTRA_INPUT_ROLE); - for (File sequenceFile : getJobSupport().getSequenceFiles()) + for (FileLike sequenceFile : getJobSupport().getSequenceFiles()) { xtandemAction.addInput(sequenceFile, FASTA_INPUT_ROLE); } @@ -246,7 +248,7 @@ public RecordedActionSet run() throws PipelineJobException } } - public void writeRunParameters(String pathSpectra, File fileParameters, File fileTaxonomy, File fileWorkOutputXML) throws IOException + public void writeRunParameters(String pathSpectra, FileLike fileParameters, FileLike fileTaxonomy, FileLike fileWorkOutputXML) throws IOException { Map params = new HashMap<>(getJobSupport().getParameters()); 
@@ -291,23 +293,23 @@ public void writeRunParameters(String pathSpectra, File fileParameters, File fil } } - public void writeTaxonomy(File fileTaxonomy, String taxonName, File[] fileDatabases) throws IOException + public void writeTaxonomy(FileLike fileTaxonomy, String taxonName, List fileDatabases) throws IOException { StringBuilder taxonomyBuffer = new StringBuilder(); taxonomyBuffer.append("\n"); taxonomyBuffer.append("\n"); taxonomyBuffer.append(" \n"); - for (File fileDatabase : fileDatabases) + for (FileLike fileDatabase : fileDatabases) { taxonomyBuffer.append(" \n"); } taxonomyBuffer.append(" \n"); taxonomyBuffer.append("\n"); String taxonomyText = taxonomyBuffer.toString(); - try (BufferedWriter taxonomyWriter = new BufferedWriter(new FileWriter(fileTaxonomy))) + try (BufferedWriter taxonomyWriter = new BufferedWriter(PrintWriters.getPrintWriter(fileTaxonomy.openOutputStream()))) { String[] lines = taxonomyText.split("\n"); for (String line : lines) diff --git a/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemToXMLTask.java b/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemToXMLTask.java index 9526af5e9..af95b4a9e 100644 --- a/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemToXMLTask.java +++ b/ms2/src/org/labkey/ms2/pipeline/tandem/XTandemToXMLTask.java @@ -30,8 +30,8 @@ import org.labkey.ms2.pipeline.AbstractMS2SearchTask; import org.labkey.ms2.pipeline.AbstractMS2SearchTaskFactory; import org.labkey.ms2.pipeline.TPPTask; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -61,7 +61,7 @@ public boolean isJobComplete(PipelineJob job) { FileAnalysisJobSupport support = (FileAnalysisJobSupport) job; String baseName = support.getBaseName(); - File dirAnalysis = support.getAnalysisDirectory(); + FileLike dirAnalysis = support.getAnalysisDirectory(); // The raw pepXML exists return NetworkDrive.exists(AbstractMS2SearchPipelineJob.getPepXMLConvertFile(dirAnalysis, baseName)); @@ 
-99,12 +99,12 @@ public RecordedActionSet run() throws PipelineJobException FileAnalysisJobSupport support = getJobSupport(); String baseName = support.getBaseName(); - File fileOutputXML = XTandemSearchTask.getNativeOutputFile(support.getAnalysisDirectory(), baseName, FileType.gzSupportLevel.SUPPORT_GZ); + FileLike fileOutputXML = XTandemSearchTask.getNativeOutputFile(support.getAnalysisDirectory(), baseName, FileType.gzSupportLevel.SUPPORT_GZ); if (!fileOutputXML.isFile()) fileOutputXML = XTandemSearchTask.getNativeOutputFile(support.getDataDirectory(), baseName, FileType.gzSupportLevel.SUPPORT_GZ); - File fileWorkOutputXML = _wd.inputFile(fileOutputXML, false); + FileLike fileWorkOutputXML = _wd.inputFile(fileOutputXML, false); - File fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), baseName, support.getGZPreference()); + FileLike fileWorkPepXMLRaw = AbstractMS2SearchPipelineJob.getPepXMLConvertFile(_wd.getDir(), baseName, support.getGZPreference()); String ver = TPPTask.getTPPVersion(getJob()); String exePath = PipelineJobService.get().getExecutablePath("Tandem2XML", null, "tpp", ver, getJob().getLogger()); @@ -115,7 +115,7 @@ public RecordedActionSet run() throws PipelineJobException _wd.getDir()); // Move final outputs to analysis directory. 
- File filePepXMLRaw; + FileLike filePepXMLRaw; try (WorkDirectory.CopyingResource ignored = _wd.ensureCopyingLock()) { filePepXMLRaw = _wd.outputFile(fileWorkPepXMLRaw); diff --git a/ms2/src/org/labkey/ms2/reader/MascotDatLoader.java b/ms2/src/org/labkey/ms2/reader/MascotDatLoader.java index 4f2bd6f5c..f83c436a9 100644 --- a/ms2/src/org/labkey/ms2/reader/MascotDatLoader.java +++ b/ms2/src/org/labkey/ms2/reader/MascotDatLoader.java @@ -22,12 +22,14 @@ import org.labkey.api.data.Container; import org.labkey.api.util.FileUtil; import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; import org.labkey.api.util.StringUtilsLabKey; import org.labkey.ms2.MS2Modification; import org.labkey.ms2.MS2RunType; import org.labkey.ms2.SpectrumException; import org.labkey.ms2.pipeline.MS2PipelineManager; import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import javax.xml.stream.XMLStreamException; import java.io.BufferedReader; @@ -351,8 +353,8 @@ public void loadParameters(PeptideFraction fraction, Container container) throws String dbFileName = _currentLine.substring(DB_PREFIX.length()).trim(); try { - File databaseFile = getDatabaseFile(container, dbFileName, null); - fraction.setDatabaseLocalPaths(Arrays.asList(databaseFile.getAbsolutePath())); + FileLike databaseFile = getDatabaseFile(container, dbFileName, null); + fraction.setDatabaseLocalPaths(Arrays.asList(databaseFile.toNioPathForRead().toFile().getAbsolutePath())); } catch (FileNotFoundException e) { @@ -791,8 +793,8 @@ public void loadHeader(PeptideFraction fraction, Container container) throws IOE if (matcher.matches()) { String s = matcher.group(1); - File databaseFile = getDatabaseFile(container, null, s.trim()); - fraction.getDatabaseLocalPaths().add(databaseFile.getAbsolutePath()); + FileLike databaseFile = getDatabaseFile(container, null, s.trim()); + fraction.getDatabaseLocalPaths().add(databaseFile.toNioPathForRead().toFile().getAbsolutePath()); } readLine(); } @@ -817,14 +819,14 @@ 
private boolean atEndOfSection() return eof() || _currentLine.matches(_boundaryMarker); } - public File getDatabaseFile(Container container, String dbName, String fastaFileName) throws FileNotFoundException + public FileLike getDatabaseFile(Container container, String dbName, String fastaFileName) throws FileNotFoundException { // Try looking for the "DB" value under the FASTA root - File dbRoot = MS2PipelineManager.getSequenceDatabaseRoot(container, true); + FileLike dbRoot = MS2PipelineManager.getSequenceDatabaseRoot(container, true); if (dbName != null) { // Mascot FASTA files may have been downloaded from the server into a ./mascot/X subdirectory, so seek it out - File file = findFile(dbRoot, dbName, 3); + FileLike file = findFile(dbRoot, dbName, 3); if (file != null) { return file; @@ -834,7 +836,7 @@ public File getDatabaseFile(Container container, String dbName, String fastaFile if (fastaFileName != null) { // Try using the full path and see if it resolves - File file = new File(fastaFileName); + FileLike file = FileSystemLike.wrapFile(new File(fastaFileName)); if (file.isFile()) { return file; @@ -857,7 +859,7 @@ public File getDatabaseFile(Container container, String dbName, String fastaFile @Nullable /** Look for the file up to maxDepth child directories under the current directory */ - private File findFile(File parent, String name, int maxDepth) + private FileLike findFile(FileLike parent, String name, int maxDepth) { // Stop looking, we've exceeded our maximum recursive depth if (maxDepth == 0) @@ -865,21 +867,18 @@ private File findFile(File parent, String name, int maxDepth) return null; } - File f = FileUtil.appendName(parent, name); + FileLike f = parent.resolveFile(Path.parse(name)); if (f.isFile()) { return f; } - File[] children = parent.listFiles(File::isDirectory); - if (children != null) + List children = parent.getChildren(FileLike::isDirectory); + for (FileLike child : children) { - for (File child : children) + f = findFile(child, name, 
maxDepth - 1); + if (f != null) { - f = findFile(child, name, maxDepth - 1); - if (f != null) - { - return f; - } + return f; } } return null; @@ -1069,7 +1068,7 @@ public void mergeQueryAndSummarySections(DatPeptide otherPeptide, boolean mergeS public class PeptideIterator implements Iterator { private DatPeptide _peptide = null; - private PeptideFraction _fraction; + private final PeptideFraction _fraction; public PeptideIterator(PeptideFraction fraction) { diff --git a/nab/src/org/labkey/nab/NabAssayController.java b/nab/src/org/labkey/nab/NabAssayController.java index 95c24ef72..c14f2effd 100644 --- a/nab/src/org/labkey/nab/NabAssayController.java +++ b/nab/src/org/labkey/nab/NabAssayController.java @@ -207,12 +207,12 @@ public void addNavTrail(NavTree root) } } - protected DilutionAssayProvider getProvider(ExpRun run) + protected DilutionAssayProvider getProvider(ExpRun run) { AssayProvider provider = AssayService.get().getProvider(run.getProtocol()); - if (!(provider instanceof DilutionAssayProvider)) + if (!(provider instanceof DilutionAssayProvider dap)) throw new NotFoundException("Run " + run.getRowId() + " is not a NAb run."); - return (DilutionAssayProvider) provider; + return dap; } protected DilutionDataHandler getDataHandler(ExpRun run) @@ -240,7 +240,7 @@ public ModelAndView getView(RenderAssayForm form, BindException errors) throws E { throw new NotFoundException("Data file for run " + run.getName() + " was not found. 
Deleted from the file system?"); } - PageFlowUtil.streamFile(getViewContext().getResponse(), file.toNioPathForRead().toFile(), true); + PageFlowUtil.streamFile(getViewContext().getResponse(), file, true); return null; } @@ -461,7 +461,7 @@ public void validateCommand(DeleteRunForm form, Errors errors) } @Override - public boolean handlePost(DeleteRunForm form, BindException errors) throws Exception + public boolean handlePost(DeleteRunForm form, BindException errors) { _run.delete(getUser()); return true; diff --git a/protein/api-src/org/labkey/api/protein/annotation/DefaultAnnotationLoader.java b/protein/api-src/org/labkey/api/protein/annotation/DefaultAnnotationLoader.java index 60c2a7082..3593d6f72 100644 --- a/protein/api-src/org/labkey/api/protein/annotation/DefaultAnnotationLoader.java +++ b/protein/api-src/org/labkey/api/protein/annotation/DefaultAnnotationLoader.java @@ -28,14 +28,13 @@ import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.vfs.FileLike; -import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Date; public abstract class DefaultAnnotationLoader extends PipelineJob { - protected File _file; + protected FileLike _file; protected String _comment = null; protected int currentInsertId = 0; @@ -45,7 +44,7 @@ public abstract class DefaultAnnotationLoader extends PipelineJob // For serialization protected DefaultAnnotationLoader() {} - public DefaultAnnotationLoader(File file, ViewBackgroundInfo info, PipeRoot pipeRoot) throws IOException + public DefaultAnnotationLoader(FileLike file, ViewBackgroundInfo info, PipeRoot pipeRoot) throws IOException { super(ProteinAnnotationPipelineProvider.NAME, info, pipeRoot); _file = FileUtil.getAbsoluteCaseSensitiveFile(file); @@ -105,7 +104,7 @@ public String getComment() return _comment; } - public File getFile() + public FileLike getFile() { return _file; } diff --git a/protein/api-src/org/labkey/api/protein/annotation/XMLProteinHandler.java 
b/protein/api-src/org/labkey/api/protein/annotation/XMLProteinHandler.java index 83b889854..fd2917e4a 100644 --- a/protein/api-src/org/labkey/api/protein/annotation/XMLProteinHandler.java +++ b/protein/api-src/org/labkey/api/protein/annotation/XMLProteinHandler.java @@ -19,13 +19,14 @@ import org.labkey.api.protein.uniprot.ParseActions; import org.labkey.api.protein.uniprot.ParseContext; import org.labkey.api.protein.uniprot.ParserTree; +import org.labkey.vfs.FileLike; import org.xml.sax.Attributes; +import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; import org.xml.sax.helpers.XMLReaderFactory; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.util.HashSet; @@ -312,9 +313,9 @@ public void startDocument() _loader.handleThreadStateChangeRequests(); } - public void parse(File file) throws IOException, SAXException + public void parse(FileLike file) throws IOException, SAXException { - getParser().parse(file.getPath()); + getParser().parse(new InputSource(file.openInputStream())); } } diff --git a/protein/api-src/org/labkey/api/protein/annotation/XMLProteinLoader.java b/protein/api-src/org/labkey/api/protein/annotation/XMLProteinLoader.java index 292f4f75d..69db05885 100644 --- a/protein/api-src/org/labkey/api/protein/annotation/XMLProteinLoader.java +++ b/protein/api-src/org/labkey/api/protein/annotation/XMLProteinLoader.java @@ -22,9 +22,9 @@ import org.labkey.api.pipeline.PipeRoot; import org.labkey.api.protein.ProteinSchema; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import org.xml.sax.SAXException; -import java.io.File; import java.io.IOException; import java.sql.Connection; @@ -49,7 +49,7 @@ protected XMLProteinLoader(@JsonProperty("_clearExisting") boolean clearExisting _clearExisting = clearExisting; } - public XMLProteinLoader(File file, ViewBackgroundInfo info, PipeRoot root, boolean clearExisting) 
throws IOException + public XMLProteinLoader(FileLike file, ViewBackgroundInfo info, PipeRoot root, boolean clearExisting) throws IOException { super(file, info, root); _clearExisting = clearExisting; diff --git a/protein/api-src/org/labkey/api/protein/fasta/FastaDbLoader.java b/protein/api-src/org/labkey/api/protein/fasta/FastaDbLoader.java index befb46042..abaff4006 100644 --- a/protein/api-src/org/labkey/api/protein/fasta/FastaDbLoader.java +++ b/protein/api-src/org/labkey/api/protein/fasta/FastaDbLoader.java @@ -39,6 +39,8 @@ import org.labkey.api.util.NetworkDrive; import org.labkey.api.util.StringUtilsLabKey; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.io.FileNotFoundException; @@ -73,13 +75,13 @@ public class FastaDbLoader extends DefaultAnnotationLoader // For serialization protected FastaDbLoader() {} - public FastaDbLoader(File file, ViewBackgroundInfo info, PipeRoot root, String hash) throws IOException + public FastaDbLoader(FileLike file, ViewBackgroundInfo info, PipeRoot root, String hash) throws IOException { super(file, info, root); _fileHash = hash; } - public FastaDbLoader(File file, ViewBackgroundInfo info, PipeRoot root) throws IOException + public FastaDbLoader(FileLike file, ViewBackgroundInfo info, PipeRoot root) throws IOException { this(file, info, root, null); } @@ -160,7 +162,7 @@ public void parseFile(Logger logger) throws SQLException, IOException synchronized (LOCK) { FastaFile file = new FastaFile(); - file.setFilename(_file.getPath()); + file.setFilename(_file.toNioPathForRead().toFile().getPath()); file.setFileChecksum(_fileHash); file = Table.insert(null, ProteinSchema.getTableInfoFastaFiles(), file); associatedFastaId = file.getFastaId(); @@ -192,7 +194,7 @@ protected int initAnnotLoad(String comment) throws SQLException { if (currentInsertId == 0) { - fdbu._initialInsertionStmt.setString(1, _file.getPath()); + 
fdbu._initialInsertionStmt.setString(1, _file.toNioPathForRead().toFile().getPath()); if (comment == null) setComment(""); fdbu._initialInsertionStmt.setString(2, comment); fdbu._initialInsertionStmt.setString(3, defaultOrganism); @@ -268,7 +270,7 @@ protected void preProcessSequences(List mouthful, Connection c, Log String bestNameTmp = curSeq.getBestName(); fdbu._addSeqStmt.setString(6, bestNameTmp); //todo: rethink best name - fdbu._addSeqStmt.setString(7, baseFileName(_file.getPath())); + fdbu._addSeqStmt.setString(7, baseFileName(_file.toNioPathForRead().toFile().getPath())); fdbu._addSeqStmt.setString(8, curSeq.getProtein().getLookup()); if (curSeq.getGenus() == null) { @@ -476,7 +478,7 @@ protected int insertLookups(int fastaID) throws SQLException protected int guessAssociatedFastaId() throws SQLException { - String bfn = baseFileName(_file.getPath()); + String bfn = baseFileName(_file.toNioPathForRead().toFile().getPath()); try (ResultSet rs = new SqlSelector(ProteinSchema.getSchema(), "SELECT FastaId,FileName FROM " + ProteinSchema.getTableInfoFastaFiles()).getResultSet()) { @@ -699,25 +701,26 @@ public static String getCanonicalPath(File f) throws IOException public static synchronized int loadAnnotations(String path, String fileName, String defaultOrganism, boolean shouldGuess, Logger log, XarContext context) throws SQLException, IOException { File f = context.findFile(fileName, new File(path)); + FileLike file = FileSystemLike.wrapFile(f); if (f == null) { throw new FileNotFoundException(fileName); } String convertedName = getCanonicalPath(f); - String hash = HashHelpers.hashFileContents(f); + String hash = HashHelpers.hashFileContents(file); Collection files = new SqlSelector(ProteinSchema.getSchema(), "SELECT * FROM " + ProteinSchema.getTableInfoFastaFiles() + " WHERE FileChecksum = ? 
ORDER BY FastaId", hash).getCollection(FastaFile.class); FastaFile loadedFile = null; - for (FastaFile file : files) + for (FastaFile fastaFile : files) { - if (file.getLoaded() == null) + if (fastaFile.getLoaded() == null) { - ProteinManager.deleteFastaFile(file.getFastaId()); + ProteinManager.deleteFastaFile(fastaFile.getFastaId()); } else { - loadedFile = file; + loadedFile = fastaFile; } } @@ -736,7 +739,7 @@ public static synchronized int loadAnnotations(String path, String fileName, Str return loadedFile.getFastaId(); } - FastaDbLoader fdbl = new FastaDbLoader(f, new ViewBackgroundInfo(context.getContainer(), context.getUser(), null), null, hash); + FastaDbLoader fdbl = new FastaDbLoader(file, new ViewBackgroundInfo(context.getContainer(), context.getUser(), null), null, hash); fdbl.setComment(new java.util.Date() + " " + convertedName); fdbl.setDefaultOrganism(defaultOrganism); fdbl.setOrganismIsToGuessed(shouldGuess); diff --git a/protein/api-src/org/labkey/api/protein/fasta/FastaReloaderJob.java b/protein/api-src/org/labkey/api/protein/fasta/FastaReloaderJob.java index 72264a4f2..4d22e4adc 100644 --- a/protein/api-src/org/labkey/api/protein/fasta/FastaReloaderJob.java +++ b/protein/api-src/org/labkey/api/protein/fasta/FastaReloaderJob.java @@ -25,6 +25,7 @@ import org.labkey.api.util.FileUtil; import org.labkey.api.util.URLHelper; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileSystemLike; import java.io.File; import java.io.IOException; @@ -73,7 +74,7 @@ public void run() String filename = fasta.getFilename(); info("Processing FASTA " + filename); - FastaDbLoader fdbl = new FastaDbLoader(new File(filename), getInfo(), getPipeRoot()); + FastaDbLoader fdbl = new FastaDbLoader(FileSystemLike.wrapFile(new File(filename)), getInfo(), getPipeRoot()); fdbl.setComment(new java.util.Date() + " " + filename); fdbl.setDefaultOrganism(FastaDbLoader.UNKNOWN_ORGANISM); fdbl.setOrganismIsToGuessed(true); diff --git 
a/protein/api-src/org/labkey/api/protein/fasta/FastaValidator.java b/protein/api-src/org/labkey/api/protein/fasta/FastaValidator.java index 0598c1d1b..8ac6d155e 100644 --- a/protein/api-src/org/labkey/api/protein/fasta/FastaValidator.java +++ b/protein/api-src/org/labkey/api/protein/fasta/FastaValidator.java @@ -16,8 +16,8 @@ package org.labkey.api.protein.fasta; import org.apache.commons.collections4.trie.PatriciaTrie; +import org.labkey.vfs.FileLike; -import java.io.File; import java.text.DecimalFormat; import java.text.Format; import java.util.ArrayList; @@ -33,7 +33,7 @@ public FastaValidator() } /** Determine if FASTA file has any duplicate protein names **/ - public List validate(File fastaFile) + public List validate(FileLike fastaFile) { List errors = new ArrayList<>(); Format lineFormat = DecimalFormat.getIntegerInstance(); diff --git a/protein/api-src/org/labkey/api/protein/fasta/ProteinFastaLoader.java b/protein/api-src/org/labkey/api/protein/fasta/ProteinFastaLoader.java index d0fbd0c0d..ae67d2a97 100644 --- a/protein/api-src/org/labkey/api/protein/fasta/ProteinFastaLoader.java +++ b/protein/api-src/org/labkey/api/protein/fasta/ProteinFastaLoader.java @@ -17,12 +17,11 @@ import org.jetbrains.annotations.NotNull; import org.labkey.api.reader.FastaLoader; - -import java.io.File; +import org.labkey.vfs.FileLike; public class ProteinFastaLoader extends FastaLoader implements Iterable { - public ProteinFastaLoader(File fastaFile) + public ProteinFastaLoader(FileLike fastaFile) { super(fastaFile, FastaProtein::new); } diff --git a/protein/api-src/org/labkey/api/protein/uniprot/ParseActions.java b/protein/api-src/org/labkey/api/protein/uniprot/ParseActions.java index 9a83517cb..807622565 100644 --- a/protein/api-src/org/labkey/api/protein/uniprot/ParseActions.java +++ b/protein/api-src/org/labkey/api/protein/uniprot/ParseActions.java @@ -16,14 +16,13 @@ package org.labkey.api.protein.uniprot; +import org.labkey.vfs.FileLike; import org.xml.sax.Attributes; 
import org.xml.sax.SAXException; -import java.io.File; - public abstract class ParseActions { - protected File _file; + protected FileLike _file; protected String _comment = null; protected int _currentInsertId = 0; @@ -37,12 +36,12 @@ public void setComment(String c) _comment = c; } - public File getFile() + public FileLike getFile() { return _file; } - public void setFile(File file) + public void setFile(FileLike file) { _file = file; } @@ -66,7 +65,7 @@ public void endElement(ParseContext context) throws SAXException { } - public void characters(ParseContext context, char ch[], int start, int len) + public void characters(ParseContext context, char[] ch, int start, int len) { } } diff --git a/protein/api-src/org/labkey/api/protein/uniprot/uniprot.java b/protein/api-src/org/labkey/api/protein/uniprot/uniprot.java index baa949275..ff8280202 100644 --- a/protein/api-src/org/labkey/api/protein/uniprot/uniprot.java +++ b/protein/api-src/org/labkey/api/protein/uniprot/uniprot.java @@ -89,7 +89,7 @@ public void beginElement(ParseContext context, Attributes attrs) throws SAXExcep if (getCurrentInsertId() == 0) { - _initialInsertion.setString(1, getFile().getPath()); + _initialInsertion.setString(1, getFile().toNioPathForRead().toFile().getPath()); if (getComment() == null) setComment(""); _initialInsertion.setString(2, getComment()); _initialInsertion.setTimestamp(3, new java.sql.Timestamp(new java.util.Date().getTime())); diff --git a/protein/src/org/labkey/protein/ProteinController.java b/protein/src/org/labkey/protein/ProteinController.java index 32e889a82..e5d0648b2 100644 --- a/protein/src/org/labkey/protein/ProteinController.java +++ b/protein/src/org/labkey/protein/ProteinController.java @@ -121,6 +121,8 @@ import org.labkey.api.view.ViewContext; import org.labkey.api.view.WebPartView; import org.labkey.api.view.template.PageConfig; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import org.springframework.validation.BindException; import 
org.springframework.validation.Errors; import org.springframework.validation.ObjectError; @@ -215,7 +217,7 @@ protected ModelAndView getHtmlView(ProbabilityProteinSearchForm form, BindExcept throw new RedirectException(url + "&" + filter.toQueryString("ProteinSearchResults")); } - if (getViewContext().getRequest().getParameter("ProteinSearchResults.GroupProbability~gte") != null) + if (request.getParameter("ProteinSearchResults.GroupProbability~gte") != null) { try { @@ -232,10 +234,10 @@ protected ModelAndView getHtmlView(ProbabilityProteinSearchForm form, BindExcept catch (NumberFormatException ignored) {} } - WebPartView searchFormView = null; + WebPartView searchFormView = null; for (ProteinService.FormViewProvider provider : ProteinService.get().getProteinSearchFormViewProviders()) { - WebPartView formView = provider.createView(getViewContext(), form); + WebPartView formView = provider.createView(getViewContext(), form); if (formView != null) { searchFormView = formView; @@ -626,7 +628,7 @@ public boolean handlePost(UploadAnnotationsForm form, BindException errors) thro } @Override - public ActionURL getSuccessURL(UploadAnnotationsForm uploadAnnotationsForm) + public ActionURL getSuccessURL(UploadAnnotationsForm uploadAnnotationsForm) { return getBeginURL(getContainer()); } @@ -1171,7 +1173,14 @@ public boolean handlePost(LoadAnnotForm form, BindException errors) throws Excep errors.addError(new LabKeyError("Please enter a file path.")); return false; } + // This action is restricted to site admins so we trust the path they provided. However, we should + // get pickier. File file = FileUtil.getAbsoluteCaseSensitiveFile(new File(fname)); + if (!file.isFile()) + { + throw new NotFoundException("File not found."); + } + FileLike fileLike = FileSystemLike.wrapFile(file); try { @@ -1180,11 +1189,11 @@ public boolean handlePost(LoadAnnotForm form, BindException errors) throws Excep //TODO: this style of dealing with different file types must be repaired.
if ("uniprot".equalsIgnoreCase(form.getFileType())) { - loader = new XMLProteinLoader(file, getViewBackgroundInfo(), null, form.isClearExisting()); + loader = new XMLProteinLoader(fileLike, getViewBackgroundInfo(), null, form.isClearExisting()); } else if ("fasta".equalsIgnoreCase(form.getFileType())) { - FastaDbLoader fdbl = new FastaDbLoader(file, getViewBackgroundInfo(), null); + FastaDbLoader fdbl = new FastaDbLoader(fileLike, getViewBackgroundInfo(), null); fdbl.setDefaultOrganism(form.getDefaultOrganism()); fdbl.setOrganismIsToGuessed(form.getShouldGuess() != null); loader = fdbl;