 import org.labkey.api.singlecell.model.Sample;
 import org.labkey.api.singlecell.model.Sort;
 import org.labkey.api.singlecell.pipeline.SeuratToolParameter;
+import org.labkey.api.util.FileUtil;
 import org.labkey.api.util.PageFlowUtil;
 import org.labkey.api.writer.PrintWriters;
 import org.labkey.singlecell.run.CellRangerFeatureBarcodeHandler;
@@ -518,12 +519,12 @@ else if (genomeIds.size() > 1)

     public File getValidCiteSeqBarcodeFile(File sourceDir, long gexReadsetId)
     {
-        return new File(sourceDir, "validADTS." + gexReadsetId + ".csv");
+        return FileUtil.appendName(sourceDir, "validADTS." + gexReadsetId + ".csv");
     }

     public File getValidCiteSeqBarcodeMetadataFile(File sourceDir, long gexReadsetId)
     {
-        return new File(sourceDir, "validADTS." + gexReadsetId + ".metadata.txt");
+        return FileUtil.appendName(sourceDir, "validADTS." + gexReadsetId + ".metadata.txt");
     }

     private void writeCiteSeqBarcodes(PipelineJob job, Map<Long, Set<String>> gexToPanels, File outputDir) throws PipelineJobException
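The recurring change in this commit swaps new File(parent, child) for FileUtil.appendName(parent, child). LabKey's actual implementation is not shown in this diff; the sketch below is only an assumed approximation of the intent, namely appending a single path element and failing fast when the child name could resolve outside the parent directory.

import java.io.File;

// Hypothetical stand-in for org.labkey.api.util.FileUtil.appendName (the real method may differ):
// append exactly one path element and reject names that could escape the parent directory.
final class SafeFilePaths
{
    static File appendName(File parent, String name)
    {
        if (name == null || name.isEmpty() || name.contains("/") || name.contains("\\") || name.contains(".."))
        {
            throw new IllegalArgumentException("Illegal file name: " + name);
        }

        return new File(parent, name);
    }
}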
@@ -585,7 +586,7 @@ private void writeCiteSeqBarcodes(PipelineJob job, Map<Long, Set<String>> gexToP

     public File getValidHashingBarcodeFile(File sourceDir)
     {
-        return new File(sourceDir, "validHashingBarcodes.csv");
+        return FileUtil.appendName(sourceDir, "validHashingBarcodes.csv");
     }

     @Override
@@ -725,7 +726,7 @@ public File generateHashingCallsForRawMatrix(Readset parentReadset, PipelineOutp

     private Map<Long, Long> getCachedCiteSeqReadsetMap(SequenceAnalysisJobSupport support) throws PipelineJobException
     {
-        return support.getCachedObject(READSET_TO_CITESEQ_MAP, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, Integer.class, Integer.class));
+        return support.getCachedObject(READSET_TO_CITESEQ_MAP, LongHashMap.class);
     }

     @Override
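The getCachedObject change above replaces a hand-built Jackson type for Map<Integer, Integer>, which did not match the method's declared Map<Long, Long> return type, with a concrete LongHashMap class. That class is not shown in this diff; assuming getCachedObject hands the class to Jackson for deserialization, a subclass that pins the generic parameters would be sufficient:

import java.util.HashMap;

// Assumed shape of LongHashMap (not part of this diff): a concrete subclass whose superclass
// signature fixes the key and value types, so reflection-based deserialization can recover
// Map<Long, Long> rather than defaulting numeric entries to Integer.
public class LongHashMap extends HashMap<Long, Long>
{
}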
@@ -784,7 +785,7 @@ public File getH5FileForGexReadset(SequenceAnalysisJobSupport support, long read
             throw new PipelineJobException("Unable to find loupe file: " + loupe.getPath());
         }

-        File h5 = new File(loupe.getParentFile(), "raw_feature_bc_matrix.h5");
+        File h5 = FileUtil.appendName(loupe.getParentFile(), "raw_feature_bc_matrix.h5");
         if (!h5.exists())
         {
             throw new PipelineJobException("Unable to find h5 file: " + h5.getPath());
@@ -796,12 +797,12 @@ public File getH5FileForGexReadset(SequenceAnalysisJobSupport support, long read
     @Override
     public File getCDNAInfoFile(File sourceDir)
     {
-        return new File(sourceDir, "cDNAInfo.txt");
+        return FileUtil.appendName(sourceDir, "cDNAInfo.txt");
     }

     public Map<Long, Long> getCachedHashingReadsetMap(SequenceAnalysisJobSupport support) throws PipelineJobException
     {
-        return support.getCachedObject(READSET_TO_HASHING_MAP, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, Integer.class, Integer.class));
+        return support.getCachedObject(READSET_TO_HASHING_MAP, LongHashMap.class);
     }

     public File getCachedReadsetToCountMatrix(SequenceAnalysisJobSupport support, long readsetId, CellHashingService.BARCODE_TYPE type) throws PipelineJobException
@@ -1033,7 +1034,7 @@ public List<ToolParameterDescriptor> getHashingCallingParams(boolean allowMethod

     public File getAllHashingBarcodesFile(File webserverDir)
     {
-        return new File(webserverDir, BARCODE_TYPE.hashing.getAllBarcodeFileName());
+        return FileUtil.appendName(webserverDir, BARCODE_TYPE.hashing.getAllBarcodeFileName());
     }

     private void writeAllHashingBarcodes(Collection<String> groupNames, User u, Container c, File webserverDir) throws PipelineJobException
@@ -1154,12 +1155,12 @@ else if ("Negative".equals(line[htoClassIdx]))

     private File getExpectedCallsFile(File outputDir, String basename)
     {
-        return new File(outputDir, basename + CALL_EXTENSION);
+        return FileUtil.appendName(outputDir, basename + CALL_EXTENSION);
     }

     private File getMolInfoFileFromCounts(File citeSeqCountOutDir)
     {
-        return new File(citeSeqCountOutDir.getParentFile(), "molecule_info.h5");
+        return FileUtil.appendName(citeSeqCountOutDir.getParentFile(), "molecule_info.h5");
     }

     public File generateCellHashingCalls(File citeSeqCountOutDir, File outputDir, String basename, Logger log, File localPipelineDir, CellHashingService.CellHashingParameters parameters, PipelineContext ctx) throws PipelineJobException
@@ -1195,11 +1196,11 @@ public File generateCellHashingCalls(File citeSeqCountOutDir, File outputDir, St
         File cellBarcodeWhitelistFile = parameters.cellBarcodeWhitelistFile;
         inputFiles.add(cellBarcodeWhitelistFile);

-        File htmlFile = new File(outputDir, basename + ".html");
-        File localHtml = new File(localPipelineDir, htmlFile.getName());
+        File htmlFile = FileUtil.appendName(outputDir, basename + ".html");
+        File localHtml = FileUtil.appendName(localPipelineDir, htmlFile.getName());

-        File countFile = new File(outputDir, basename + ".rawCounts.rds");
-        File localCounts = new File(localPipelineDir, countFile.getName());
+        File countFile = FileUtil.appendName(outputDir, basename + ".rawCounts.rds");
+        File localCounts = FileUtil.appendName(localPipelineDir, countFile.getName());

         // Note: if this job fails and then is resumed, having that pre-existing copy of the HTML can pose a problem
         if (localHtml.exists())
@@ -1219,7 +1220,7 @@ public File generateCellHashingCalls(File citeSeqCountOutDir, File outputDir, St
             metricsFile.delete();
         }

-        File localRScript = new File(outputDir, "generateCallsWrapper.R");
+        File localRScript = FileUtil.appendName(outputDir, "generateCallsWrapper.R");
         try (PrintWriter writer = PrintWriters.getPrintWriter(localRScript))
         {
             String cellbarcodeWhitelist = cellBarcodeWhitelistFile != null ? "'" + cellBarcodeWhitelistFile.getPath() + "'" : "NULL";
@@ -1394,7 +1395,7 @@ public void copyHtmlLocally(SequenceOutputHandler.JobContext ctx) throws Pipelin
             if (f.getName().endsWith(".hashing.html"))
             {
                 ctx.getLogger().info("Copying hashing HTML locally for debugging: " + f.getName());
-                File target = new File(ctx.getSourceDirectory(), f.getName());
+                File target = FileUtil.appendName(ctx.getSourceDirectory(), f.getName());
                 if (target.exists())
                 {
                     target.delete();
@@ -1461,7 +1462,7 @@ public Set<String> getHtosForReadset(Long hashingReadsetId, File webserverJobDir
     public File subsetBarcodes(File allCellBarcodes, @Nullable String barcodePrefix) throws PipelineJobException
     {
         //Subset barcodes by dataset:
-        File output = new File(allCellBarcodes.getParentFile(), "cellBarcodeWhitelist." + (barcodePrefix == null ? "all" : barcodePrefix) + ".txt");
+        File output = FileUtil.appendName(allCellBarcodes.getParentFile(), "cellBarcodeWhitelist." + (barcodePrefix == null ? "all" : barcodePrefix) + ".txt");
         try (CSVReader reader = new CSVReader(Readers.getReader(allCellBarcodes), '\t'); CSVWriter writer = new CSVWriter(PrintWriters.getPrintWriter(output), '\t', CSVWriter.NO_QUOTE_CHARACTER))
         {
             String[] line;
@@ -1490,7 +1491,7 @@ public File getCellBarcodesFromSeurat(File seuratObj)

     public File getCellBarcodesFromSeurat(File seuratObj, boolean throwIfNotFound)
     {
-        File barcodes = new File(seuratObj.getParentFile(), seuratObj.getName().replaceAll("seurat.rds$", "cellBarcodes.csv"));
+        File barcodes = FileUtil.appendName(seuratObj.getParentFile(), seuratObj.getName().replaceAll("seurat.rds$", "cellBarcodes.csv"));
         if (throwIfNotFound && !barcodes.exists())
         {
             throw new IllegalArgumentException("Unable to find expected cell barcodes file. This might indicate the seurat object was created with an older version of the pipeline. Expected: " + barcodes.getPath());
@@ -1506,7 +1507,7 @@ public File getMetaTableFromSeurat(File seuratObj)

     public File getMetaTableFromSeurat(File seuratObj, boolean throwIfNotFound)
     {
-        File barcodes = new File(seuratObj.getParentFile(), seuratObj.getName().replaceAll("seurat.rds$", "seurat.meta.txt.gz"));
+        File barcodes = FileUtil.appendName(seuratObj.getParentFile(), seuratObj.getName().replaceAll("seurat.rds$", "seurat.meta.txt.gz"));
         if (throwIfNotFound && !barcodes.exists())
         {
             throw new IllegalArgumentException("Unable to find expected metadata file. This might indicate the seurat object was created with an older version of the pipeline. Expected: " + barcodes.getPath());
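As a usage note, both helpers above resolve a sibling file purely by rewriting the seurat.rds suffix of the input file name. A brief illustration, with invented paths and assuming the call site sits inside this service class:

// Hypothetical example; only the "seurat.rds" suffix is rewritten by the helpers above.
File seuratObj = new File("/work/analysis123.seurat.rds");

File cellBarcodes = getCellBarcodesFromSeurat(seuratObj, false); // -> /work/analysis123.cellBarcodes.csv
File metaTable = getMetaTableFromSeurat(seuratObj, false);       // -> /work/analysis123.seurat.meta.txt.gz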