Fix languageserver errors #726

Draft · wants to merge 4 commits into dev
6 changes: 3 additions & 3 deletions subworkflows/local/binning.nf
@@ -16,15 +16,15 @@ include { FASTA_BINNING_CONCOCT } from '../../subworkflows/nf-co
workflow BINNING {
    take:
    assemblies // channel: [ val(meta), path(assembly), path(bams), path(bais) ]
-    reads      // channel: [ val(meta), [ reads ] ]
+    _reads     // channel: [ val(meta), [ reads ] ]
Review comment from a Contributor:

    If this isn't used in the workflow at all, remove from the workflow inputs?

Reply from @dialvarezs (Contributor, Author), Dec 20, 2024:

    You're right, I guess it makes more sense to remove it.

(The underscore-prefix convention behind these renames is sketched below, after this file's diff.)


    main:

    ch_versions = Channel.empty()

    // generate coverage depths for each contig
    ch_summarizedepth_input = assemblies
-        .map { meta, assembly, bams, bais ->
+        .map { meta, _assembly, bams, bais ->
            [ meta, bams, bais ]
        }

@@ -45,7 +45,7 @@ workflow BINNING {
            [ meta_new, assembly, bams, bais ]
        }
        .join( ch_metabat_depths, by: 0 )
-        .map { meta, assembly, bams, bais, depths ->
+        .map { meta, assembly, _bams, _bais, depths ->
            [ meta, assembly, depths ]
        }

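A minimal sketch of the convention this PR applies throughout (illustrative names and files, not pipeline code): the Nextflow language server reports parameters that are declared but never used, and a leading underscore marks them as intentionally ignored, which silences the diagnostic.

    // Toy channel of shape [ meta, assembly, reads ]
    ch_input = Channel.of( [ [id: 'sample1'], file('assembly.fa'), file('reads.fq') ] )

    ch_input
        .map { meta, _assembly, reads ->   // `_assembly` is deliberately unused here
            [ meta, reads ]
        }
        .view()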
14 changes: 9 additions & 5 deletions subworkflows/local/binning_preparation.nf
@@ -21,20 +21,24 @@ workflow BINNING_PREPARATION {
            .combine(reads)
    } else if (params.binning_map_mode == 'group'){
        // combine assemblies with reads of samples from same group
-        ch_reads_bowtie2 = reads.map{ meta, reads -> [ meta.group, meta, reads ] }
+        ch_reads_bowtie2 = reads.map{ meta, sample_reads -> [ meta.group, meta, sample_reads ] }
        ch_bowtie2_input = BOWTIE2_ASSEMBLY_BUILD.out.assembly_index
            .map { meta, assembly, index -> [ meta.group, meta, assembly, index ] }
            .combine(ch_reads_bowtie2, by: 0)
-            .map { group, assembly_meta, assembly, index, reads_meta, reads -> [ assembly_meta, assembly, index, reads_meta, reads ] }
+            .map { _group, assembly_meta, assembly, index, reads_meta, sample_reads ->
+                [ assembly_meta, assembly, index, reads_meta, sample_reads ]
+            }

    } else {
        // i.e. --binning_map_mode 'own'
        // combine assemblies (not co-assembled) with reads from own sample
-        ch_reads_bowtie2 = reads.map{ meta, reads -> [ meta.id, meta, reads ] }
+        ch_reads_bowtie2 = reads.map{ meta, sample_reads -> [ meta.id, meta, sample_reads ] }
        ch_bowtie2_input = BOWTIE2_ASSEMBLY_BUILD.out.assembly_index
            .map { meta, assembly, index -> [ meta.id, meta, assembly, index ] }
            .combine(ch_reads_bowtie2, by: 0)
-            .map { id, assembly_meta, assembly, index, reads_meta, reads -> [ assembly_meta, assembly, index, reads_meta, reads ] }
+            .map { _id, assembly_meta, assembly, index, reads_meta, sample_reads ->
+                [ assembly_meta, assembly, index, reads_meta, sample_reads ]
+            }

}

@@ -45,7 +49,7 @@ workflow BINNING_PREPARATION {
            .map { meta, assembly, bams, bais -> [ meta, assembly.sort()[0], bams, bais ] } // multiple symlinks to the same assembly -> use first of sorted list

    emit:
-    bowtie2_assembly_multiqc = BOWTIE2_ASSEMBLY_ALIGN.out.log.map { assembly_meta, reads_meta, log -> [ log ] }
+    bowtie2_assembly_multiqc = BOWTIE2_ASSEMBLY_ALIGN.out.log.map { _assembly_meta, _reads_meta, log -> [ log ] }
    bowtie2_version = BOWTIE2_ASSEMBLY_ALIGN.out.versions
    grouped_mappings = ch_grouped_mappings
}
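Both the 'group' and 'own' branches above use the same keyed-combine pattern. A hedged sketch with toy values ('g1', the metas, and the file names are invented for illustration):

    ch_assemblies = Channel.of( [ 'g1', [id: 'asm1'], file('a1.fa') ] )
    ch_reads      = Channel.of( [ 'g1', [id: 's1'],  file('s1.fq') ] )

    ch_assemblies
        .combine( ch_reads, by: 0 )   // pair items sharing the first tuple element
        .map { _group, asm_meta, assembly, reads_meta, sample_reads ->
            [ asm_meta, assembly, reads_meta, sample_reads ]   // key dropped once matched
        }
        .view()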
20 changes: 10 additions & 10 deletions subworkflows/local/binning_refinement.nf
@@ -16,21 +16,21 @@ include { RENAME_POSTDASTOOL } from
workflow BINNING_REFINEMENT {
    take:
    ch_contigs_for_dastool // channel: [ val(meta), path(contigs) ]
-    bins // channel: [ val(meta), path(bins) ]
+    bins                   // channel: [ val(meta), path(bins) ]

    main:
    ch_versions = Channel.empty()

    // remove domain information, will add it back later
    // everything here is either unclassified or a prokaryote
    ch_bins = bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta - meta.subMap(['domain','refinement'])
-            [meta_new, bins]
+            [meta_new, bin_list]
        }
        .groupTuple()
        .map {
-            meta, bins -> [meta, bins.flatten()]
+            meta, bin_list -> [meta, bin_list.flatten()]
        }

// prepare bins
@@ -87,27 +87,27 @@ workflow BINNING_REFINEMENT {
        }
        .groupTuple()
        .map {
-            meta, bins ->
+            meta, bin_list ->
                def domain_class = params.bin_domain_classification ? 'prokarya' : 'unclassified'
                def meta_new = meta + [refinement: 'dastool_refined', domain: domain_class]
-                [ meta_new, bins ]
+                [ meta_new, bin_list ]
        }

    ch_input_for_renamedastool = DASTOOL_DASTOOL.out.bins
        .map {
-            meta, bins ->
+            meta, bin_list ->
                def domain_class = params.bin_domain_classification ? 'prokarya' : 'unclassified'
                def meta_new = meta + [refinement: 'dastool_refined', binner: 'DASTool', domain: domain_class]
-                [ meta_new, bins ]
+                [ meta_new, bin_list ]
        }

    RENAME_POSTDASTOOL ( ch_input_for_renamedastool )

    refined_unbins = RENAME_POSTDASTOOL.out.refined_unbins
        .map {
-            meta, bins ->
+            meta, bin_list ->
                def meta_new = meta + [refinement: 'dastool_refined_unbinned']
-                [meta_new, bins]
+                [meta_new, bin_list]
        }

emit:
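The `bins` → `bin_list` renames in this subworkflow avoid shadowing the `bins` input while leaving the meta-map idiom unchanged. A self-contained Groovy sketch of that idiom (toy values):

    def meta     = [id: 'sample1', domain: 'prokarya', refinement: 'raw']
    def stripped = meta - meta.subMap(['domain', 'refinement'])   // new map without those keys
    def tagged   = stripped + [refinement: 'dastool_refined']     // new map with a key added
    assert stripped == [id: 'sample1']
    assert tagged   == [id: 'sample1', refinement: 'dastool_refined']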
8 changes: 4 additions & 4 deletions subworkflows/local/depths.nf
@@ -6,15 +6,15 @@ include { MAG_DEPTHS_SUMMARY } from '../../modules/local/mag_
 * Get number of columns in file (first line)
 */
def getColNo(filename) {
-    lines = file(filename).readLines()
+    def lines = file(filename).readLines()
    return lines[0].split('\t').size()
}

/*
 * Get number of rows in a file
 */
def getRowNo(filename) {
-    lines = file(filename).readLines()
+    def lines = file(filename).readLines()
    return lines.size()
}

@@ -40,7 +40,7 @@ workflow DEPTHS {
        .combine(depths, by: 0)
        .transpose()
        .map {
-            meta_combine, meta, bins, depth ->
+            _meta_combine, meta, bins, depth ->
                def meta_new = meta - meta.subMap('domain','refinement')
                [meta_new, bins, depth]
        }
@@ -58,7 +58,7 @@ workflow DEPTHS {
    // Plot bin depths heatmap for each assembly and mapped samples (according to `binning_map_mode`)
    // create file containing group information for all samples
    ch_sample_groups = reads
-        .collectFile(name:'sample_groups.tsv'){ meta, reads -> meta.id + '\t' + meta.group + '\n' }
+        .collectFile(name:'sample_groups.tsv'){ meta, _sample_reads -> meta.id + '\t' + meta.group + '\n' }

// Filter MAG depth files: use only those for plotting that contain depths for > 2 samples
// as well as > 2 bins
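The added `def` in `getColNo` and `getRowNo` is more than a style fix: in a Groovy script, assigning to an undeclared name inside a function targets the script's shared binding rather than a local variable, so successive calls can interfere and the language server flags the undeclared name. A sketch in plain Groovy (file handling simplified, illustrative only):

    def getRowNo(filename) {
        def lines = new File(filename).readLines()   // `def` keeps `lines` local to this call
        return lines.size()
    }
    // Without `def`, `lines` would be created as a script-level property shared
    // by every function that assigns to the same name.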
8 changes: 5 additions & 3 deletions subworkflows/local/gtdbtk.nf
@@ -23,8 +23,10 @@ workflow GTDBTK {
        .map { row ->
            def completeness = -1
            def contamination = -1
-            def missing, duplicated
-            if (params.busco_db && file(params.busco_db).getBaseName().contains('odb10')) {
+            def missing
+            def duplicated
+            def busco_db = file(params.busco_db)
+            if (busco_db.getBaseName().contains('odb10')) {
                missing = row.'%Missing (specific)' // TODO or just take '%Complete'?
                duplicated = row.'%Complete and duplicated (specific)'
            } else {
@@ -54,7 +56,7 @@ workflow GTDBTK {
        .transpose()
        .map { meta, bin -> [bin.getName(), bin, meta]}
        .join(ch_bin_metrics, failOnDuplicate: true)
-        .map { bin_name, bin, meta, completeness, contamination -> [meta, bin, completeness, contamination] }
+        .map { _bin_name, bin, meta, completeness, contamination -> [meta, bin, completeness, contamination] }
        .branch {
            passed: (it[2] != -1 && it[2] >= params.gtdbtk_min_completeness && it[3] != -1 && it[3] <= params.gtdbtk_max_contamination)
                return [it[0], it[1]]
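Splitting `def missing, duplicated` into two statements is, to my reading of this PR, a concession to the stricter Nextflow language specification, which flags multi-variable declarations that plain Groovy would accept; hoisting `file(params.busco_db)` into `busco_db` likewise keeps the condition simple. A minimal before/after:

    // Flagged by the language server (one statement, two variables):
    // def missing, duplicated

    // Accepted: one declaration per statement.
    def missing
    def duplicated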
2 changes: 1 addition & 1 deletion subworkflows/local/longread_preprocessing.nf
@@ -70,7 +70,7 @@ workflow LONGREAD_PREPROCESSING {
    ch_short_and_long_reads = ch_long_reads
        .map { meta, lr -> [ meta.id, meta, lr ] }
        .join(ch_short_reads_tmp, by: 0)
-        .map { id, meta_lr, lr, meta_sr, sr -> [ meta_lr, sr, lr ] } // should not occur for single-end, since SPAdes (hybrid) does not support single-end
+        .map { _id, meta_lr, lr, _meta_sr, sr -> [ meta_lr, sr, lr ] } // should not occur for single-end, since SPAdes (hybrid) does not support single-end

FILTLONG (
ch_short_and_long_reads
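Note the `join` here, versus the `combine(by: 0)` used in `binning_preparation.nf`: `join` pairs items one-to-one on the key (and by default drops unmatched items), while `combine` emits every matching combination. A toy sketch of this hunk's shape (invented sample names and files):

    ch_long  = Channel.of( [ 's1', [id: 's1'], file('s1.nanopore.fq') ] )
    ch_short = Channel.of( [ 's1', [id: 's1'], file('s1.illumina.fq') ] )

    ch_long
        .join( ch_short, by: 0 )   // one-to-one match on the sample id
        .map { _id, meta_lr, lr, _meta_sr, sr -> [ meta_lr, sr, lr ] }
        .view()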
54 changes: 27 additions & 27 deletions subworkflows/local/tiara.nf
@@ -13,16 +13,16 @@ workflow TIARA {
    ch_versions = Channel.empty()

    bins = bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [bin: 'bins']
-            meta_new.bin = 'bins'
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    unbins = unbins
-        .map { meta, unbins ->
+        .map { meta, unbin_list ->
            def meta_new = meta + [bin: 'unbins']
-            [meta_new, unbins]
+            [meta_new, unbin_list]
        }

ch_tiara_input = bins.mix(unbins)
@@ -38,54 +38,54 @@ workflow TIARA {
    // Have to remove binner information from the meta map to do this
    ch_contigs_to_bin_tiara = DASTOOL_FASTATOCONTIG2BIN_TIARA.out.fastatocontig2bin
        .combine(ch_tiara_input, by: 0)
-        .map { meta, contig2bin, bins ->
+        .map { meta, contig2bin, bin_list ->
            def meta_join = meta - meta.subMap('binner', 'bin')
-            [ meta_join, meta, contig2bin, bins ]
+            [ meta_join, meta, contig2bin, bin_list ]
        }

    ch_tiara_classify_input = ch_contigs_to_bin_tiara
        .combine( TIARA_TIARA.out.classifications, by: 0)
-        .map { meta_join, meta, contig2bin, bins, classifications ->
-            [ meta, classifications, contig2bin, bins ]
+        .map { _meta_join, meta, contig2bin, bin_list, classifications ->
+            [ meta, classifications, contig2bin, bin_list ]
        }

TIARA_CLASSIFY( ch_tiara_classify_input )
ch_versions = ch_versions.mix(TIARA_CLASSIFY.out.versions.first())

    ch_eukarya_bins = TIARA_CLASSIFY.out.eukarya_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [domain: 'eukarya']
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_prokarya_bins = TIARA_CLASSIFY.out.prokarya_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [domain: 'prokarya']
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_bacteria_bins = TIARA_CLASSIFY.out.bacteria_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [domain: 'bacteria']
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_archaea_bins = TIARA_CLASSIFY.out.archaea_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [domain: 'archaea']
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_organelle_bins = TIARA_CLASSIFY.out.organelle_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [domain: 'organelle']
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_unknown_bins = TIARA_CLASSIFY.out.unknown_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta + [domain: 'unknown']
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

ch_classified_bins_unbins = ch_eukarya_bins
@@ -96,25 +96,25 @@ workflow TIARA {
.mix(ch_unknown_bins)

    ch_classified_bins = ch_classified_bins_unbins
-        .filter { meta, bins ->
+        .filter { meta, _bin_list ->
            meta.bin == "bins"
        }
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta - meta.subMap('bin')
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_classified_unbins = ch_classified_bins_unbins
-        .filter { meta, bins ->
+        .filter { meta, _bin_list ->
            meta.bin == "unbins"
        }
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta - meta.subMap('bin')
-            [meta_new, bins]
+            [meta_new, bin_list]
        }

    ch_bin_classifications = TIARA_CLASSIFY.out.bin_classifications
-        .map { meta, classification ->
+        .map { _meta, classification ->
            [ classification ]
        }
        .collect()
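The tag → mix → classify → filter round-trip above (stamp each channel with `bin: 'bins'` or `bin: 'unbins'`, merge them, run TIARA once, then split on the tag and drop it) is a common way to push two related channels through a single process. A hedged, self-contained sketch of the pattern (toy data, not pipeline code):

    ch_bins   = Channel.of( [ [id: 's1', bin: 'bins'],   file('bin1.fa') ] )
    ch_unbins = Channel.of( [ [id: 's1', bin: 'unbins'], file('unbinned.fa') ] )

    ch_mixed = ch_bins.mix(ch_unbins)   // one stream through the classifier

    ch_only_bins = ch_mixed
        .filter { meta, _fasta -> meta.bin == 'bins' }
        .map { meta, fasta -> [ meta - meta.subMap('bin'), fasta ] }   // drop the routing tag

    ch_only_bins.view()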