Skip to content

Commit 431345b

Browse files
committed
Another bugfix to serialization of Map<Long, Long>
1 parent 70058a9 commit 431345b

File tree

1 file changed

+10
-10
lines changed

singlecell/src/org/labkey/singlecell/CellHashingServiceImpl.java

Lines changed: 10 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -598,7 +598,7 @@ public File generateHashingCallsForRawMatrix(Readset parentReadset, PipelineOutp
598598
}
599599

600600
parameters.validate(true);
601-
Map<Long, Long> readsetToHashing = getCachedHashingReadsetMap(ctx.getSequenceSupport());
601+
Map<Long, Integer> readsetToHashing = getCachedHashingReadsetMap(ctx.getSequenceSupport());
602602
if (readsetToHashing.isEmpty())
603603
{
604604
ctx.getLogger().info("No cached " + parameters.type.name() + " readsets, skipping");
@@ -622,7 +622,7 @@ public File generateHashingCallsForRawMatrix(Readset parentReadset, PipelineOutp
622622
ctx.getLogger().debug("total cached readset/" + parameters.type.name() + " readset pairs: " + readsetToHashing.size());
623623
ctx.getLogger().debug("unique indexes: " + lineCount);
624624

625-
Readset htoReadset = ctx.getSequenceSupport().getCachedReadset(readsetToHashing.get(parentReadset.getReadsetId()));
625+
Readset htoReadset = ctx.getSequenceSupport().getCachedReadset((long)readsetToHashing.get(parentReadset.getReadsetId()));
626626
if (htoReadset == null)
627627
{
628628
throw new PipelineJobException("Unable to find HTO readset for readset: " + parentReadset.getRowId());
@@ -724,15 +724,15 @@ public File generateHashingCallsForRawMatrix(Readset parentReadset, PipelineOutp
724724
return callsFile;
725725
}
726726

727-
private Map<Long, Long> getCachedCiteSeqReadsetMap(SequenceAnalysisJobSupport support) throws PipelineJobException
727+
private Map<Long, Integer> getCachedCiteSeqReadsetMap(SequenceAnalysisJobSupport support) throws PipelineJobException
728728
{
729729
return support.getCachedObject(READSET_TO_CITESEQ_MAP, LongHashMap.class);
730730
}
731731

732732
@Override
733733
public boolean usesCellHashing(SequenceAnalysisJobSupport support, File sourceDir) throws PipelineJobException
734734
{
735-
Map<Long, Long> gexToHashingMap = getCachedHashingReadsetMap(support);
735+
Map<Long, Integer> gexToHashingMap = getCachedHashingReadsetMap(support);
736736
if (gexToHashingMap == null || gexToHashingMap.isEmpty())
737737
return false;
738738

@@ -748,7 +748,7 @@ public boolean usesCellHashing(SequenceAnalysisJobSupport support, File sourceDi
748748
@Override
749749
public boolean usesCiteSeq(SequenceAnalysisJobSupport support, List<SequenceOutputFile> inputFiles) throws PipelineJobException
750750
{
751-
Map<Long, Long> gexToCiteMap = getCachedCiteSeqReadsetMap(support);
751+
Map<Long, Integer> gexToCiteMap = getCachedCiteSeqReadsetMap(support);
752752
if (gexToCiteMap == null || gexToCiteMap.isEmpty())
753753
return false;
754754

@@ -800,7 +800,7 @@ public File getCDNAInfoFile(File sourceDir)
800800
return FileUtil.appendName(sourceDir, "cDNAInfo.txt");
801801
}
802802

803-
public Map<Long, Long> getCachedHashingReadsetMap(SequenceAnalysisJobSupport support) throws PipelineJobException
803+
public Map<Long, Integer> getCachedHashingReadsetMap(SequenceAnalysisJobSupport support) throws PipelineJobException
804804
{
805805
return support.getCachedObject(READSET_TO_HASHING_MAP, LongHashMap.class);
806806
}
@@ -1370,13 +1370,13 @@ public File generateCellHashingCalls(File citeSeqCountOutDir, File outputDir, St
13701370
@Override
13711371
public File getExistingFeatureBarcodeCountDir(Readset parentReadset, BARCODE_TYPE type, SequenceAnalysisJobSupport support) throws PipelineJobException
13721372
{
1373-
Long childId = type == BARCODE_TYPE.hashing ? getCachedHashingReadsetMap(support).get(parentReadset.getReadsetId()) : getCachedCiteSeqReadsetMap(support).get(parentReadset.getReadsetId());
1373+
Integer childId = type == BARCODE_TYPE.hashing ? getCachedHashingReadsetMap(support).get(parentReadset.getReadsetId()) : getCachedCiteSeqReadsetMap(support).get(parentReadset.getReadsetId());
13741374
if (childId == null)
13751375
{
13761376
throw new PipelineJobException("Unable to find cached readset of type " + type.name() + " for parent: " + parentReadset.getReadsetId());
13771377
}
13781378

1379-
File ret = getCachedReadsetToCountMatrix(support, childId, type);
1379+
File ret = getCachedReadsetToCountMatrix(support, (long)childId, type);
13801380
if (ret == null)
13811381
{
13821382
throw new PipelineJobException("Unable to find cached count matrix of type " + type.name() + " for parent: " + parentReadset.getReadsetId());
@@ -1421,7 +1421,7 @@ public void copyHtmlLocally(SequenceOutputHandler.JobContext ctx) throws Pipelin
14211421
@Override
14221422
public Set<String> getHtosForParentReadset(Long parentReadsetId, File webserverJobDir, SequenceAnalysisJobSupport support, boolean throwIfNotFound) throws PipelineJobException
14231423
{
1424-
Long htoReadset = getCachedHashingReadsetMap(support).get(parentReadsetId);
1424+
Integer htoReadset = getCachedHashingReadsetMap(support).get(parentReadsetId);
14251425
if (htoReadset == null)
14261426
{
14271427
if (throwIfNotFound)
@@ -1434,7 +1434,7 @@ public Set<String> getHtosForParentReadset(Long parentReadsetId, File webserverJ
14341434
}
14351435
}
14361436

1437-
return getHtosForReadset(htoReadset, webserverJobDir);
1437+
return getHtosForReadset((long)htoReadset, webserverJobDir);
14381438
}
14391439

14401440
public Set<String> getHtosForReadset(Long hashingReadsetId, File webserverJobDir) throws PipelineJobException

0 commit comments

Comments
 (0)