PrecisionFDA
Truth Challenge
Engage with our community challenges and help improve DNA test results
Explore HG002 comparison results
Use this interactive explorer to filter the full set of results by submission entry, variant type and subtype, genomic subset (stratification region), and genotype.
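The same filtering can be done offline. The sketch below is a minimal example, assuming the table has been exported to a CSV (the file name `hg002_comparison_results.csv` is hypothetical) with the column names shown in the table that follows; it selects one entry's genome-wide INDEL rows and lists the stratification subsets with the lowest recall.

```python
import pandas as pd

# Hypothetical export of the comparison results table; adjust the file name
# and column names to match the actual download.
df = pd.read_csv("hg002_comparison_results.csv")

# Genome-wide ("*" subtype) INDEL results for a single submission entry,
# restricted to stratification subsets where recall falls below 90%.
weak = df[
    (df["Entry"] == "ltrigg-rtg2")
    & (df["Type"] == "INDEL")
    & (df["Subtype"] == "*")
    & (df["Recall"] < 90.0)
]

# Worst-performing subsets first.
print(
    weak.sort_values("F-score")[
        ["Subset", "Genotype", "F-score", "Recall", "Precision", "Frac_NA"]
    ].to_string(index=False)
)
```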
Showing results 2951-3000 of 86,044.

Entry | Type | Subtype | Subset | Genotype | F-score (%) | Recall (%) | Precision (%) | Frac_NA (%) | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
ltrigg-rtg2 | INDEL | * | func_cds | homalt | 99.7783 | 99.5575 | 100.0000 | 29.6875 | 225 | 1 | 225 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 98.0519 | 3 | 0 | 3 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | homalt | 75.0000 | 60.0000 | 100.0000 | 99.6099 | 3 | 2 | 3 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 97.9592 | 3 | 0 | 3 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | homalt | 66.6667 | 50.0000 | 100.0000 | 99.7379 | 2 | 2 | 2 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | homalt | 99.7778 | 99.5565 | 100.0000 | 68.6843 | 1347 | 6 | 1340 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 94.0000 | 3 | 0 | 3 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 94.4444 | 2 | 0 | 2 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 83.3333 | 1 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | het | 89.8734 | 81.6092 | 100.0000 | 99.8937 | 71 | 16 | 73 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | homalt | 89.4737 | 80.9524 | 100.0000 | 99.9544 | 17 | 4 | 18 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_SimpleRepeat_triTR_51to200 | hetalt | 95.8333 | 92.0000 | 100.0000 | 35.1955 | 115 | 10 | 116 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | lowcmp_SimpleRepeat_triTR_51to200 | homalt | 98.9247 | 97.8723 | 100.0000 | 36.9863 | 46 | 1 | 46 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l100_m0_e0 | hetalt | 91.8033 | 84.8485 | 100.0000 | 94.2857 | 28 | 5 | 30 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l100_m1_e0 | hetalt | 91.2281 | 83.8710 | 100.0000 | 91.4516 | 104 | 20 | 106 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l100_m2_e0 | hetalt | 91.3043 | 84.0000 | 100.0000 | 91.9488 | 105 | 20 | 107 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l100_m2_e1 | hetalt | 91.3580 | 84.0909 | 100.0000 | 91.6667 | 111 | 21 | 113 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l125_m0_e0 | hetalt | 95.2381 | 90.9091 | 100.0000 | 96.7742 | 10 | 1 | 12 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l125_m1_e0 | hetalt | 94.7368 | 90.0000 | 100.0000 | 95.1282 | 36 | 4 | 38 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l125_m2_e0 | hetalt | 93.6709 | 88.0952 | 100.0000 | 95.5429 | 37 | 5 | 39 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l125_m2_e1 | hetalt | 92.5000 | 86.0465 | 100.0000 | 95.5982 | 37 | 6 | 39 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l150_m0_e0 | hetalt | 94.1176 | 88.8889 | 100.0000 | 96.6777 | 8 | 1 | 10 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l150_m1_e0 | hetalt | 92.3077 | 85.7143 | 100.0000 | 96.7742 | 18 | 3 | 19 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l150_m2_e0 | hetalt | 92.3077 | 85.7143 | 100.0000 | 97.1471 | 18 | 3 | 19 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l150_m2_e1 | hetalt | 90.4762 | 82.6087 | 100.0000 | 97.0501 | 19 | 4 | 20 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m0_e0 | homalt | 95.8333 | 92.0000 | 100.0000 | 95.5638 | 23 | 2 | 24 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 98.5673 | 6 | 0 | 5 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m1_e0 | homalt | 98.1308 | 96.3303 | 100.0000 | 91.7518 | 105 | 4 | 105 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 98.7685 | 6 | 0 | 5 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m2_e0 | homalt | 98.2301 | 96.5217 | 100.0000 | 92.6733 | 111 | 4 | 111 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 98.7923 | 6 | 0 | 5 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | map_l250_m2_e1 | homalt | 98.2456 | 96.5517 | 100.0000 | 92.8205 | 112 | 4 | 112 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | segdup | hetalt | 98.0392 | 96.1538 | 100.0000 | 95.7193 | 125 | 5 | 133 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | segdupwithalt | * | 100.0000 | 100.0000 | 100.0000 | 99.9964 | 1 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | segdupwithalt | het | 100.0000 | 100.0000 | 100.0000 | 99.9938 | 1 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | tech_badpromoters | hetalt | 100.0000 | 100.0000 | 100.0000 | 55.5556 | 4 | 0 | 4 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | * | tech_badpromoters | homalt | 98.4615 | 96.9697 | 100.0000 | 54.9296 | 32 | 1 | 32 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_AllRepeats_51to200bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 97.6744 | 0 | 0 | 2 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_AllRepeats_lt51bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 96.1538 | 0 | 0 | 7 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331 | homalt | 0.0000 | 0.0000 | 100.0000 | 96.4567 | 0 | 0 | 9 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_51to200bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 94.1176 | 0 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt101bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 96.7742 | 0 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | het | 0.0000 | 0.0000 | 100.0000 | 98.6111 | 0 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 98.1481 | 0 | 0 | 1 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt101bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 95.3333 | 0 | 0 | 7 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | hetalt | 0.0000 | 0.0000 | 100.0000 | 94.1558 | 0 | 0 | 9 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 93.8144 | 0 | 0 | 6 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 95.6989 | 0 | 0 | 8 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 96.4567 | 0 | 0 | 9 | 0 | 0 | ||
ltrigg-rtg2 | INDEL | C16_PLUS | lowcmp_SimpleRepeat_diTR_11to50 | het | 0.0000 | 0.0000 | 100.0000 | 96.4789 | 0 | 0 | 5 | 0 | 0 |
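The summary metrics shown are consistent with the standard definitions: recall = Truth TP / (Truth TP + Truth FN), precision = Query TP / (Query TP + Query FP), and F-score as the harmonic mean of the two. The short sketch below checks this against the first row of the table (func_cds / homalt).

```python
def f_score(precision_pct: float, recall_pct: float) -> float:
    """Harmonic mean of precision and recall, both expressed in percent."""
    return 2 * precision_pct * recall_pct / (precision_pct + recall_pct)

# First row above: Truth TP = 225, Truth FN = 1, Query TP = 225, Query FP = 0.
recall = 100.0 * 225 / (225 + 1)       # 99.5575...
precision = 100.0 * 225 / (225 + 0)    # 100.0
print(round(recall, 4), round(precision, 4), round(f_score(precision, recall), 4))
# Expected output, matching the table: 99.5575 100.0 99.7783
```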