@@ -1,10 +1,18 @@
#let cr_colors = (
-  dark_grey: rgb("#333333"), beige: rgb("#fdf0d5"), light_grey: rgb("#eeeeee"), dark_red: rgb("#780000"), red: rgb("#c1121f"), blue: rgb("#669bbc"), dark_blue: rgb("#003049"), green: rgb("#29bf12"),
+  dark_grey: rgb("#333333"),
+  beige: rgb("#fdf0d5"),
+  light_grey: rgb("#eeeeee"),
+  dark_red: rgb("#780000"),
+  red: rgb("#c1121f"),
+  blue: rgb("#669bbc"),
+  dark_blue: rgb("#003049"),
+  green: rgb("#29bf12"),
)

#import "@preview/fletcher:0.5.1" as fletcher: diagram, node, edge
#import "@preview/metro:0.3.0": *
#import "@preview/cetz:0.2.2"
+#import "@preview/badgery:0.1.1": *

#set page(paper: "a4",
  // fill: cr_colors.light_grey,
@@ -27,7 +35,30 @@
#set text(size: 16pt, fill: cr_colors.dark_blue)

#let contigs = (
-  "chr1", "chr2", "chr3", "chr4", "chr5", "chr6", "chr7", "chr8", "chr9", "chr10", "chr11", "chr12", "chr13", "chr14", "chr15", "chr16", "chr17", "chr18", "chr19", "chr20", "chr21", "chr22", "chrX", "chrY",
+  "chr1",
+  "chr2",
+  "chr3",
+  "chr4",
+  "chr5",
+  "chr6",
+  "chr7",
+  "chr8",
+  "chr9",
+  "chr10",
+  "chr11",
+  "chr12",
+  "chr13",
+  "chr14",
+  "chr15",
+  "chr16",
+  "chr17",
+  "chr18",
+  "chr19",
+  "chr20",
+  "chr21",
+  "chr22",
+  "chrX",
+  "chrY",
)

#let parseCustomDate(dateString) = {
@@ -52,7 +83,11 @@
  capitalizedWords.join(" ")
}
#let si-fmt(val, precision: 1, sep: "\u{202F}", binary: false) = {
-  let factor = if binary { 1024 } else { 1000 }
+  let factor = if binary {
+    1024
+  } else {
+    1000
+  }
  let gt1_suffixes = ("k", "M", "G", "T", "P", "E", "Z", "Y")
  let lt1_suffixes = ("m", "μ", "n", "p", "f", "a", "z", "y")
  let scale = ""
@@ -104,14 +139,24 @@
  image(path + "_chromosome.svg")
  let data = json(path + "_stats.json")
  grid(
-    columns: (1fr, 2fr), gutter: 3pt, align(
-      left + horizon,
-    )[
+    columns: (1fr, 2fr),
+    gutter: 3pt,
+    align(left + horizon)[
      #set text(size: 12pt)
      #table(
-        stroke: none, columns: (auto, 1fr), gutter: 3pt, [Mean], [#calc.round(data.mean, digits: 2)], [Standard dev.], [#calc.round(data.std_dev, digits: 2)], ..data.breaks_values.map(r => ([#r.at(0)], [#calc.round(r.at(1) * 100, digits: 1)%])).flatten(),
+        stroke: none, columns: (auto, 1fr), gutter: 3pt, [Mean], [#calc.round(
+          data.mean,
+          digits: 2,
+        )], [Standard dev.], [#calc.round(
+          data.std_dev,
+          digits: 2,
+        )], ..data.breaks_values.map(r => (
+          [#r.at(0)],
+          [#calc.round(r.at(1) * 100, digits: 1)%],
+        )).flatten(),
      )
-    ], align(right, image(path + "_distrib.svg", width: 100%)),
+    ],
+    align(right, image(path + "_distrib.svg", width: 100%)),
  )

  parbreak()
@@ -121,23 +166,32 @@
#let reportBam(path) = {
  let data = json(path)
  table(
-    gutter: 3pt, stroke: none, columns: (auto, 1fr), ..for (key, value) in data {
+    gutter: 3pt, stroke: none, columns: (auto, 1fr), ..for (key, value) in (
+      data
+    ) {
      if key != "cramino" and key != "composition" and key != "path" and key != "modified" {
        ([ #formatString(key) ], [ #value ])
      } else if key == "modified" {
        ([ Modified Date (UTC) ], [ #parseCustomDate(value) ])
      } else if key == "composition" {
-        ([ Run(s) ], [
-          #for (i, v) in value.enumerate() {
-            if i > 0 [ \ ]
-            [#v.at(0).slice(0, 5): #calc.round(v.at(1), digits: 0)%]
-          }
-        ])
+        (
+          [ Run(s) ],
+          [
+            #for (i, v) in value.enumerate() {
+              if i > 0 [ \ ]
+              [#v.at(0).slice(0, 5): #calc.round(v.at(1), digits: 0)%]
+            }
+          ],
+        )
      } else if key == "cramino" {
        for (k, v) in value {
-          if k == "normalized_read_count_per_chromosome" {} else if k != "path" and k != "checksum" and k != "creation_time" and k != "file_name" {
+          if k == "normalized_read_count_per_chromosome" { } else if k != "path" and k != "checksum" and k != "creation_time" and k != "file_name" {
            let k = formatString(k)
-            let v = if type(v) == "integer" { si-fmt(v) } else { v }
+            let v = if type(v) == "integer" {
+              si-fmt(v)
+            } else {
+              v
+            }
            ([ #k ], [ #v ])
          } else {
            ()
@@ -162,7 +216,7 @@
}

#let printReadCount(diag_path, mrd_path) = {
-  let index = 14;
+  let index = 14
  let c = contigs
  c.push("chrM")
  let diag = formatedReadCount(diag_path)
@@ -172,7 +226,9 @@
  mrd.insert(0, "mrd")
  let arrays1 = (c.slice(0, index), diag.slice(0, index), mrd.slice(0, index))
  table(
-    columns: arrays1.at(0).len(), ..arrays1.map(arr => arr.map(item => [#item])).flatten(),
+    columns: arrays1.at(0).len(), ..arrays1
+      .map(arr => arr.map(item => [#item]))
+      .flatten(),
  )

  let arrays2 = (c.slice(index), diag.slice(index), mrd.slice(index))
@@ -181,7 +237,9 @@
  arrays2.at(2).insert(0, "mrd")

  table(
-    columns: arrays2.at(0).len(), ..arrays2.map(arr => arr.map(item => [#item])).flatten(),
+    columns: arrays2.at(0).len(), ..arrays2
+      .map(arr => arr.map(item => [#item]))
+      .flatten(),
  )
}

@@ -190,24 +248,84 @@
  let data = json(path)
  set text(8pt)
  diagram(
-    spacing: (8pt, 25pt), node-fill: gradient.radial(cr_colors.light_grey, cr_colors.blue, radius: 300%), node-stroke: cr_colors.dark_blue + 1pt, edge-stroke: 1pt, mark-scale: 70%, node-inset: 8pt, node(
-      (0.2, 0), [Variants MRD: #num(data.vcf_stats.n_tumoral_init)], corner-radius: 2pt, extrude: (0, 3), name: <input_mrd>,
-    ), node(
-      (1.8, 0), [Variants Diag: #num(data.vcf_stats.n_constit_init)], corner-radius: 2pt, extrude: (0, 3), name: <input_diag>,
-    ), node(
-      (1, 1), align(center)[Variant in MRD ?], shape: diamond, name: <is_in_mrd>,
-    ), edge(<input_mrd>, "s", <is_in_mrd>, "-|>"), edge(<input_diag>, "s", <is_in_mrd>, "-|>"), edge(<is_in_mrd>, <is_low_mrd>, "-|>", [Yes], label-pos: 0.8), node(
-      (0.25, 2), [MRD variant depth \ < 4 ?], shape: diamond, name: <is_low_mrd>,
-    ), edge(<is_low_mrd>, <low_mrd>, "-|>"), node(
-      (0, 3), [Low MRD depth: #num(data.vcf_stats.n_low_mrd_depth)], shape: parallelogram, name: <low_mrd>,
-    ), edge(<is_in_mrd>, <next>, "-|>", [No], label-pos: 0.8), node(
-      (1.85, 2), [To BAM filters: #num(data.bam_stats.n_lasting)], shape: chevron, extrude: (-3, 0), name: <next>, stroke: cr_colors.green,
-    ), edge(<is_low_mrd>, <homo>, "-|>"), node((1.5, 3), [VAF = 100% ?], shape: diamond, name: <homo>), edge(<homo>, <constit>, "-|>", [Yes], label-pos: 0.5, bend: -80deg), edge(<homo>, <chi>, "-|>", [No], label-pos: 0.6), node(
-      (1.5, 4), [$#sym.chi^2$ VAF MRD vs Diag ?], shape: diamond, name: <chi>,
-    ), edge(<chi>, <constit>, "-|>", label-pos: 0.8), node(
-      (1, 5), [Constit: #num(data.vcf_stats.n_constit)], shape: parallelogram, name: <constit>,
-    ), edge(<chi>, <loh>, "-|>", [p < 0.01], label-pos: 0.8), node(
-      (2, 5), [LOH: #num(data.vcf_stats.n_loh)], shape: parallelogram, name: <loh>,
+    spacing: (8pt, 25pt),
+    node-fill: gradient.radial(
+      cr_colors.light_grey,
+      cr_colors.blue,
+      radius: 300%,
+    ),
+    node-stroke: cr_colors.dark_blue + 1pt,
+    edge-stroke: 1pt,
+    mark-scale: 70%,
+    node-inset: 8pt,
+    node(
+      (0.2, 0),
+      [Variants MRD: #num(data.vcf_stats.n_tumoral_init)],
+      corner-radius: 2pt,
+      extrude: (0, 3),
+      name: <input_mrd>,
+    ),
+    node(
+      (1.8, 0),
+      [Variants Diag: #num(data.vcf_stats.n_constit_init)],
+      corner-radius: 2pt,
+      extrude: (0, 3),
+      name: <input_diag>,
+    ),
+    node(
+      (1, 1),
+      align(center)[Variant in MRD ?],
+      shape: diamond,
+      name: <is_in_mrd>,
+    ),
+    edge(<input_mrd>, "s", <is_in_mrd>, "-|>"),
+    edge(<input_diag>, "s", <is_in_mrd>, "-|>"),
+    edge(<is_in_mrd>, <is_low_mrd>, "-|>", [Yes], label-pos: 0.8),
+    node(
+      (0.25, 2),
+      [MRD variant depth \ < 4 ?],
+      shape: diamond,
+      name: <is_low_mrd>,
+    ),
+    edge(<is_low_mrd>, <low_mrd>, "-|>"),
+    node(
+      (0, 3),
+      [Low MRD depth: #num(data.vcf_stats.n_low_mrd_depth)],
+      shape: parallelogram,
+      name: <low_mrd>,
+    ),
+    edge(<is_in_mrd>, <next>, "-|>", [No], label-pos: 0.8),
+    node(
+      (1.85, 2),
+      [To BAM filters: #num(data.bam_stats.n_lasting)],
+      shape: chevron,
+      extrude: (-3, 0),
+      name: <next>,
+      stroke: cr_colors.green,
+    ),
+    edge(<is_low_mrd>, <homo>, "-|>"),
+    node((1.5, 3), [VAF = 100% ?], shape: diamond, name: <homo>),
+    edge(<homo>, <constit>, "-|>", [Yes], label-pos: 0.5, bend: -80deg),
+    edge(<homo>, <chi>, "-|>", [No], label-pos: 0.6),
+    node(
+      (1.5, 4),
+      [$#sym.chi^2$ VAF MRD vs Diag ?],
+      shape: diamond,
+      name: <chi>,
+    ),
+    edge(<chi>, <constit>, "-|>", label-pos: 0.8),
+    node(
+      (1, 5),
+      [Constit: #num(data.vcf_stats.n_constit)],
+      shape: parallelogram,
+      name: <constit>,
+    ),
+    edge(<chi>, <loh>, "-|>", [p < 0.01], label-pos: 0.8),
+    node(
+      (2, 5),
+      [LOH: #num(data.vcf_stats.n_loh)],
+      shape: parallelogram,
+      name: <loh>,
    ),
  )
}

@@ -215,23 +333,71 @@
#let bamFilter(path) = {
  import fletcher.shapes: diamond, parallelogram, hexagon
  let data = json(path)
-  set text(8pt)
+  set text(8pt)

  diagram(
-    spacing: (8pt, 25pt), node-fill: gradient.radial(cr_colors.light_grey, cr_colors.blue, radius: 300%), node-inset: 8pt, node-stroke: cr_colors.dark_blue + 1pt, mark-scale: 70%, edge-stroke: 1pt, node(
-      (0.75, 0), [Variants not in MRD VCF: #num(data.bam_stats.n_lasting)], corner-radius: 2pt, extrude: (0, 3), name: <input_mrd>,
-    ), edge(<input_mrd>, <depth>, "-|>"), node((0.75, 1), [MRD alignement depth ?], shape: diamond, name: <depth>), edge(<depth>, <low_depth>, "-|>", [< 4]), node(
-      (0, 2), [Low MRD depth: #num(data.bam_stats.n_low_mrd_depth)], shape: parallelogram, name: <low_depth>,
-    ), edge(<depth>, <seen>, "-|>"), node(
-      (0.75, 3), [Alt. base seen in MRD pileup ?], shape: diamond, name: <seen>,
-    ), edge(<seen>, <constit>, "-|>", [Yes]), node(
-      (0, 4), [Constit: #num(data.bam_stats.n_constit)], shape: parallelogram, name: <constit>,
-    ), edge(<seen>, <is_div>, "-|>", [No]), node(
-      (1.1, 4), [Sequence #sym.plus.minus 20nt \ diversity ?], shape: diamond, name: <is_div>,
-    ), edge(<is_div>, <low_div>, "-|>", [entropy < 1.8]), node(
-      (0.25, 5), [Low diversity, artefact: #num(data.bam_stats.n_low_diversity)], shape: parallelogram, name: <low_div>,
-    ), edge(<is_div>, <somatic>, "-|>"), node(
-      (1.75, 5), [Somatic: #num(data.bam_stats.n_somatic)], shape: hexagon, extrude: (-3, 0), name: <somatic>, stroke: cr_colors.green,
+    spacing: (8pt, 25pt),
+    node-fill: gradient.radial(
+      cr_colors.light_grey,
+      cr_colors.blue,
+      radius: 300%,
+    ),
+    node-inset: 8pt,
+    node-stroke: cr_colors.dark_blue + 1pt,
+    mark-scale: 70%,
+    edge-stroke: 1pt,
+    node(
+      (0.75, 0),
+      [Variants not in MRD VCF: #num(data.bam_stats.n_lasting)],
+      corner-radius: 2pt,
+      extrude: (0, 3),
+      name: <input_mrd>,
+    ),
+    edge(<input_mrd>, <depth>, "-|>"),
+    node((0.75, 1), [MRD alignment depth ?], shape: diamond, name: <depth>),
+    edge(<depth>, <low_depth>, "-|>", [< 4]),
+    node(
+      (0, 2),
+      [Low MRD depth: #num(data.bam_stats.n_low_mrd_depth)],
+      shape: parallelogram,
+      name: <low_depth>,
+    ),
+    edge(<depth>, <seen>, "-|>"),
+    node(
+      (0.75, 3),
+      [Alt. base seen in MRD pileup ?],
+      shape: diamond,
+      name: <seen>,
+    ),
+    edge(<seen>, <constit>, "-|>", [Yes]),
+    node(
+      (0, 4),
+      [Constit: #num(data.bam_stats.n_constit)],
+      shape: parallelogram,
+      name: <constit>,
+    ),
+    edge(<seen>, <is_div>, "-|>", [No]),
+    node(
+      (1.1, 4),
+      [Sequence #sym.plus.minus 20nt \ diversity ?],
+      shape: diamond,
+      name: <is_div>,
+    ),
+    edge(<is_div>, <low_div>, "-|>", [entropy < 1.8]),
+    node(
+      (0.25, 5),
+      [Low diversity, artefact: #num(data.bam_stats.n_low_diversity)],
+      shape: parallelogram,
+      name: <low_div>,
+    ),
+    edge(<is_div>, <somatic>, "-|>"),
+    node(
+      (1.75, 5),
+      [Somatic: #num(data.bam_stats.n_somatic)],
+      shape: hexagon,
+      extrude: (-3, 0),
+      name: <somatic>,
+      stroke: cr_colors.green,
    ),
  )
}
@@ -239,7 +405,7 @@
#let barCallers(path) = {
  import cetz.draw: *
  import cetz.chart
-
+
  let json_data = json(path).variants_stats
  let data = json_data.find(item => item.name == "callers_cat")
  let chart_data = data.counts.pairs().sorted(key: x => -x.at(1))
@@ -248,12 +414,15 @@
  cetz.canvas(
    length: 80%,
    {
-      set-style(axes: (bottom: (tick: (label: (angle: 45deg, anchor: "north-east")))))
+      set-style(axes: (
+        bottom: (tick: (label: (angle: 45deg, anchor: "north-east"))),
+      ))
      chart.columnchart(
        chart_data,
        size: (1, 0.5),
      )
-    })
+    },
+  )
}
#let truncate(text, max-length) = {
  if text.len() <= max-length {
@@ -262,6 +431,114 @@
    text.slice(0, max-length - 3) + "..."
  }
}
+
+// #let add_newlines(text, n) = {
+//   let result = ""
+//   let chars = text.clusters()
+//   for (i, char) in chars.enumerate() {
+//     result += char
+//     if calc.rem(i + 1, n) == 0 and i < chars.len() - 1 {
+//       result += "\n"
+//     }
+//   }
+//   result
+// }
+
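+// Split whitespace-separated words longer than `max_length` into chunks of at
+// most `max_length` characters (each chunk suffixed with `hyphen`), so that
+// very long tokens can still wrap; chunks are re-joined with spaces.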
+#let break_long_words(text, max_length: 20, hyphen: "") = {
+  let words = text.split(" ")
+  let result = ()
+
+  for word in words {
+    if word.len() <= max_length {
+      result.push(word)
+    } else {
+      let segments = ()
+      let current_segment = ""
+      for char in word.clusters() {
+        if current_segment.len() + 1 > max_length {
+          segments.push(current_segment + hyphen)
+          current_segment = ""
+        }
+        current_segment += char
+      }
+      if current_segment != "" {
+        segments.push(current_segment)
+      }
+      result += segments
+    }
+  }
+
+  result.join(" ")
+}
+
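+// Same chunking as break_long_words, but the default separator is an explicit
+// line break and the result is wrapped in a full-width box with tight leading,
+// intended for long unbroken sequences.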
+#let format_sequence(text, max_length: 40, hyphen: [#linebreak()]) = {
+  let words = text.split(" ")
+  let result = ()
+  // result.push("\n")
+
+  for word in words {
+    if word.len() <= max_length {
+      result.push(word)
+    } else {
+      let segments = ()
+      let current_segment = ""
+      for char in word.clusters() {
+        if current_segment.len() + 1 > max_length {
+          segments.push(current_segment + hyphen)
+          current_segment = ""
+        }
+        current_segment += char
+      }
+      if current_segment != "" {
+        segments.push(current_segment)
+      }
+      result += segments
+    }
+  }
+  result.push("")
+  let sequence = result.join(" ")
+
+  box(width: 100%, par(leading: 0.2em, sequence))
+}
+
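+// Colour-code each base (A/T/C/G), wrap the sequence every `line_length`
+// characters, and render it in a small monospace box on a grey background.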
+#let dna(sequence, line_length: 60) = {
+  let formatted = sequence.clusters().map(c => {
+    if c == "A" {
+      text(fill: red)[A]
+    } else if c == "T" {
+      text(fill: green)[T]
+    } else if c == "C" {
+      text(fill: blue)[C]
+    } else if c == "G" {
+      text(fill: orange)[G]
+    } else {
+      c
+    }
+  })
+
+  let lines = formatted.chunks(line_length).map(line => line.join())
+  let n_lines = lines.len()
+
+  let lines = lines.join("\n")
+
+  if n_lines > 1 {
+    parbreak()
+  }
+  align(left, box(
+    fill: luma(240),
+    inset: (x: 0.5em, y: 0.5em),
+    radius: 4pt,
+    align(left, text(
+      font: "Fira Code",
+      size: 10pt,
+      lines,
+    )),
+  ))
+  if n_lines > 1 {
+    parbreak()
+  }
+}
+
#let to-string(content) = {
  if content.has("text") {
    content.text
@@ -273,6 +550,7 @@
    " "
  }
}
+
#let format-number(num) = {
  let s = str(num).split("").filter(item => item != "")
  let result = ""
@@ -286,45 +564,156 @@
  result
}

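+// Flatten a decoded JSON value into a readable "KEY: value, ..." string,
+// dropping empty and "." entries; `svinsseq` values are rendered with dna().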
+#let format_json(json_data) = {
+  let format_value(value) = {
+    if value == none {
+      ""
+    } else if type(value) == "string" {
+      if value != "." {
+        value.replace(";", ", ").replace("=", ": ")
+      } else {
+        ""
+      }
+    } else if type(value) == "array" {
+      let items = value.map(v => format_value(v))
+      "[" + items.join(", ") + "]"
+    } else if type(value) == "dictionary" {
+      "{" + format_json(value) + "}"
+    } else {
+      str(value)
+    }
+  }
+
+  if type(json_data) == "dictionary" {
+    let result = ()
+    for (key, value) in json_data {
+      let formatted_value = format_value(value)
+      if formatted_value != "" {
+        if key == "svinsseq" {
+          formatted_value = dna(formatted_value)
+        }
+        result.push(upper(key) + ": " + formatted_value)
+      }
+    }
+    result.join(", ")
+  } else {
+    format_value(json_data)
+  }
+}
+
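+// Render one variant as a boxed card: a title row with locus and ref/alt
+// alleles, the consequence and gene, optional HGVS c./p. rows, then badges
+// (VAF, COSMIC, gnomAD) and per-caller quality/FORMAT/INFO details.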
#let card(d) = {
+  set text(12pt)
  let position_fmt = format-number(d.position)
  let title_bg_color = rgb("#f9fafb00")
-  rect(
-    width: 100%,
-    fill: cr_colors.light_grey,
-    stroke: cr_colors.dark_grey,
-    inset: 0em,
-
-    radius: 12pt,
-    [
-      #pad(top: 0.5em, bottom: -0.5em, box(
-        width: 100%,
-        fill: title_bg_color,
-        align(center,
-          text(weight: "bold")[
-            #d.contig:#position_fmt #d.reference#sym.quote.angle.r.single#truncate(d.alternative, 25)
-          ]
-        )
-      ))
-      #line(length: 100%)
-
-
-      #lorem(50)
-    ]
+  let grid_content = ()
+
+  let callers_data = json.decode(d.callers_data)
+
+  // Title
+  let alt = d.alternative
+
+  // TODO: add that in pandora_lib_variants
+  if d.callers == "Nanomonsv" and alt == "<INS>" {
+    alt = d.reference + callers_data.at(0).info.Nanomonsv.svinsseq
+  }
+
+  let title = d.contig + ":" + position_fmt + " " + d.reference + sym
+    .quote
+    .angle
+    .r
+    .single + truncate(alt, 30)
+
+  grid_content.push(
+    grid.cell(
+      fill: cr_colors.light_grey,
+      align: center,
+      block(width: 100%, title),
+    ),
+  )
+
+  // Consequences
+  let consequences = d.consequence.replace(",", ", ").replace(
+    "_",
+    " ",
+  ) + " " + emph(strong(d.gene))
+  grid_content.push(
+    grid.cell(fill: cr_colors.light_grey, align: center, consequences),
+  )
+
+  // hgvs_c
+  if d.hgvs_c != none {
+    grid_content.push(
+      grid.cell(fill: rgb("#fef08a"), align: center, truncate(d.hgvs_c, 50)),
+    )
+  }
+
+  // hgvs_p
+  if d.hgvs_p != none {
+    grid_content.push(
+      grid.cell(fill: rgb("#fecaca"), align: center, truncate(d.hgvs_p, 50)),
+    )
+  }
+
+  // Content
+  let content = ()
+  content.push(
+    badge-red("VAF: " + str(calc.round(d.m_vaf * 100, digits: 2)) + "%"),
+  )
+  // content.push(" ")
+
+  if d.cosmic_n != none {
+    content.push(badge-red("Cosmic: " + str(d.cosmic_n)))
+  }
+
+  if d.gnomad_af != none {
+    content.push(badge-blue("GnomAD: " + str(d.gnomad_af)))
+  }
+
+  let callers_contents = ()
+  for caller_data in callers_data {
+    let caller = ""
+    for (k, v) in caller_data.format {
+      caller = k
+    }
+    callers_contents.push(underline(caller) + ":")
+    callers_contents.push([
+      Qual: #caller_data.qual,
+      #(format_json(caller_data.format.at(caller)), format_json(caller_data.info.at(caller))).filter(v => v != "").join(", ")
+    ])
+  }
+
+  content.push(
+    grid(
+      columns: 1,
+      inset: 0.5em,
+      ..callers_contents
+    ),
+  )
+
+  grid_content.push(grid.cell(fill: white, content.join(" ")))
+
+
+  block(
+    breakable: false,
+    width: 100%,
+    grid(
+      columns: 1,
+      inset: 0.5em,
+      stroke: cr_colors.dark_grey,
+      ..grid_content
+    ),
  )
}
-// #let variant(d) = {
-//   card(d)
-// }

-#let variants(path) = {
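+// Render a card for every variant in the annotated-variants JSON whose
+// interpretation matches the requested class (PATHO by default).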
+#let variants(path, interpretation: "PATHO") = {
  let data = json(path)
-  for var in data {
+  let patho = data.filter(d => d.interpretation == interpretation)
+  for var in patho {
    card(var)
  }
}

-
#set heading(numbering: (..numbers) => {
  if numbers.pos().len() >= 2 and numbers.pos().len() <= 3 {
    numbering("1.1", ..numbers.pos().slice(1))
@@ -340,19 +729,20 @@

== Alignement
#grid(
-  columns: (1fr, 1fr), gutter: 3pt, [
+  columns: (1fr, 1fr),
+  gutter: 3pt,
+  [
    ==== Diagnostic sample
    #set text(size: 11pt)
-    #reportBam(
-      sys.inputs.base + "/diag/" + sys.inputs.id + "_diag_hs1_info.json",
-    )
-  ], [
+    #reportBam(sys.inputs.base + "/diag/" + sys.inputs.id + "_diag_hs1_info.json")
+  ],
+  [
    ==== MRD sample
    #set text(size: 11pt)
-    #reportBam(
-      sys.inputs.base + "/mrd/" + sys.inputs.id + "_mrd_hs1_info.json",
-    )
-    #set footnote(numbering: n => { " " })
+    #reportBam(sys.inputs.base + "/mrd/" + sys.inputs.id + "_mrd_hs1_info.json")
+    #set footnote(numbering: n => {
+      " "
+    })
    #footnote[Values computed by #link("https://github.com/wdecoster/cramino")[cramino] v0.14.5
    ]
  ],
@@ -363,14 +753,17 @@
#[
  #set text(size: 10pt)
  #printReadCount(
-    sys.inputs.base + "/diag/" + sys.inputs.id + "_diag_hs1_info.json", sys.inputs.base + "/mrd/" + sys.inputs.id + "_mrd_hs1_info.json",
+    sys.inputs.base + "/diag/" + sys.inputs.id + "_diag_hs1_info.json",
+    sys.inputs.base + "/mrd/" + sys.inputs.id + "_mrd_hs1_info.json",
  )
]

=== Coverage by chromosome
==== Proportion at given depth by chromosome
#reportCoverage(sys.inputs.base + "/diag/report/data/scan/" + sys.inputs.id)
-#set footnote(numbering: n => { " " })
+#set footnote(numbering: n => {
+  " "
+})
#footnote[Values computed by Pandora development version]

== Variants
@@ -378,15 +771,29 @@
#pagebreak()
==== VCF filters
#pad(
-  top: 0.8cm, align(center, scale(x: 100%, y: 100%, reflow: true, variantsFlow(
-    sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_variants_stats.json",
-  ))),
+  top: 0.8cm,
+  align(
+    center,
+    scale(
+      x: 100%,
+      y: 100%,
+      reflow: true,
+      variantsFlow(sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_variants_stats.json"),
+    ),
+  ),
)
==== BAM filters
-#pad(top: 0.8cm,
-  align(center, scale(x: 100%, y: 100%, reflow: true, bamFilter(
-    sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_variants_stats.json",
-  ))),
+#pad(
+  top: 0.8cm,
+  align(
+    center,
+    scale(
+      x: 100%,
+      y: 100%,
+      reflow: true,
+      bamFilter(sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_variants_stats.json"),
+    ),
+  ),
)
#pagebreak()

@@ -403,7 +810,23 @@
#pagebreak()

=== Selected Variants
-#variants(sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_annot_variants.json")
+==== Pathogenic variants
+#variants(
+  sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_annot_variants.json",
+  interpretation: "PATHO",
+)
+
+==== Likely pathogenic variants
+#variants(
+  sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_annot_variants.json",
+  interpretation: "PROBPATHO",
+)
+
+==== Variants of uncertain significance
+#variants(
+  sys.inputs.base + "/diag/report/data/" + sys.inputs.id + "_annot_variants.json",
+  interpretation: "US",
+)

#pagebreak()
== Conclusion