@@ -37,12 +37,12 @@ def do_call(args):
         "threads": args.threads,
         # "amperr": args.amperrs,
         # "amperri": args.amperri,
-        "amperr_file": args.output + "/" + args.output + ".amp.tn.txt",
-        "amperri_file": args.output + "/" + args.output + ".amp.id.txt",
+        "amperr_file": args.output + ".amp.tn.txt",
+        "amperri_file": args.output + ".amp.id.txt",
         # "dmgerr": args.dmgerrs,
         # "dmgerri": args.dmgerri,
-        "dmgerr_file": args.output + "/" + args.output + ".dmg.tn.txt",
-        "dmgerri_file": args.output + "/" + args.output + ".dmg.id.txt",
+        "dmgerr_file": args.output + ".dmg.tn.txt",
+        "dmgerri_file": args.output + ".dmg.id.txt",
         "mutRate": args.mutRate,
         "pcutoff": args.threshold,
         "mapq": args.mapq,
@@ -133,7 +133,7 @@ def do_call(args):
     except OSError as e:
         if e.errno != errno.EEXIST:
             raise
-    bamObject = BAM(args.bam, "rb")
+    bamObject = BAM(args.bam, "rb", args.reference)
 
     """
     Execulte variant calling
@@ -184,7 +184,7 @@ def do_call(args):
     # print(args.threads)
     # if args.normalBam:
     cutSites, chunkSize, contigs = splitBamRegions(
-        [args.bam], args.threads, contigs, args.windowSize
+        [args.bam], args.threads, contigs, args.windowSize, args.reference
     )
     # else:
     # cutSites, chunkSize, contigs = splitBamRegions(
@@ -284,17 +284,17 @@ def do_call(args):
         mismatch_dmg_profile, columns=["A", "T", "C", "G"], index=num2trinuc
     )
     # np.savetxt(params["output"] + "/" + args.output + ".amp.tn.txt",np.hstack([trinuc_cols[0:32],mismatch_profile]),delimiter="\t",header=" \tA\tT\tC\tG\n")
-    amp_tn_pd.to_csv(params["output"] + "/" + args.output + ".amp.tn.txt", sep="\t")
+    amp_tn_pd.to_csv(args.output + ".amp.tn.txt", sep="\t")
     np.savetxt(
-        params["output"] + "/" + args.output + ".amp.id.txt",
+        args.output + ".amp.id.txt",
         indelerr_profile,
         delimiter="\t",
         fmt="%d",
     )
-    dmg_tn_pd.to_csv(params["output"] + "/" + args.output + ".dmg.tn.txt", sep="\t")
+    dmg_tn_pd.to_csv(args.output + ".dmg.tn.txt", sep="\t")
     # np.savetxt(params["output"] + "/" + args.output + ".dmg.tn.txt",np.hstack([trinuc_cols,mismatch_dmg_profile]),delimiter="\t",header=" \tA\tT\tC\tG\n")
     np.savetxt(
-        params["output"] + "/" + args.output + ".dmg.id.txt",
+        args.output + ".dmg.id.txt",
         indelerr_dmg_profile,
         delimiter="\t",
         fmt="%d",
@@ -368,11 +368,11 @@ def do_call(args):
     print("....Splitting genomic regions for parallel execution.....")
     if args.normalBams:
         cutSites, chunkSize, contigs = splitBamRegions(
-            [args.bam], args.threads, contigs, args.windowSize
+            [args.bam], args.threads, contigs, args.windowSize, args.reference
         )
     else:
         cutSites, chunkSize, contigs = splitBamRegions(
-            [args.bam], args.threads, contigs, args.windowSize
+            [args.bam], args.threads, contigs, args.windowSize, args.reference
         )
     currentContigIndex = 0
     usedTime = (time.time() - startTime) / 60
@@ -491,7 +491,7 @@ def do_call(args):
     FPAll = sum(FPs, [])
     RPAll = sum(RPs, [])
 
-    tBam = BAM(args.bam, "rb")
+    tBam = BAM(args.bam, "rb", args.reference)
     contigs = tBam.references
     # print(contigs)
     chromDict = {contig: tBam.get_reference_length(contig) for contig in contigs}
@@ -518,11 +518,11 @@ def do_call(args):
     }
     filterDict = {"PASS": "All filter Passed"}
     vcfLines = createVcfStrings(chromDict, infoDict, formatDict, filterDict, mutsAll)
-    with open(args.output + "/" + args.output + "_snv.vcf", "w") as vcf:
+    with open(args.output + "_snv.vcf", "w") as vcf:
         vcf.write(vcfLines)
 
     vcfLines = createVcfStrings(chromDict, infoDict, formatDict, filterDict, indelsAll)
-    with open(args.output + "/" + args.output + "_indel.vcf", "w") as vcf:
+    with open(args.output + "_indel.vcf", "w") as vcf:
         vcf.write(vcfLines)
 
     burden_naive = muts_num / (coverage)
@@ -531,7 +531,7 @@ def do_call(args):
     pass_duprate = unique_read_num / pass_read_num
 
     with open(
-        params["output"] + "/" + args.output + "_duplex_group_stats.txt", "w"
+        args.output + "_duplex_group_stats.txt", "w"
     ) as f:
         f.write(
             "duplex_group_strand_composition\tduplex_group_number\t\
@@ -570,13 +570,13 @@ def do_call(args):
     trinuc_by_duplex_group = pd.DataFrame(duplex_read_num_trinuc)
     trinuc_by_duplex_group.insert(0, "", trinuc_list)
     trinuc_by_duplex_group.to_csv(
-        params["output"] + "/" + args.output + "_trinuc_by_duplex_group.txt",
+        args.output + "_trinuc_by_duplex_group.txt",
         sep="\t",
         index=False,
     )
 
     muts_by_group = np.loadtxt(
-        params["output"] + "/" + args.output + "_duplex_group_stats.txt",
+        args.output + "_duplex_group_stats.txt",
         skiprows=1,
         dtype=float,
         delimiter="\t",
@@ -610,7 +610,7 @@ def do_call(args):
     lgd2 = mpatches.Patch(color="blue", label="Naive")
     plt.legend(handles=[lgd1, lgd2])
     plt.savefig(
-        params["output"] + "/" + args.output + "_burden_by_duplex_group_size.png"
+        args.output + "_burden_by_duplex_group_size.png"
     )
     """
     if len(FPAll + RPAll) != 0:
@@ -633,7 +633,7 @@ def do_call(args):
     clonal_num = 0
     """
 
-    with open(params["output"] + "/" + args.output + "_stats.txt", "w") as f:
+    with open(args.output + "_stats.txt", "w") as f:
         f.write(f"Number of Read Families\t{unique_read_num}\n")
         f.write(f"Number of Pass-filter Reads\t{pass_read_num}\n")
         f.write(f"Number of Effective Read Families\t{duplex_num}\n")