PrecisionFDA
Truth Challenge
Engage and improve DNA test results with our community challenges
Explore HG002 comparison results
Use this interactive explorer to filter all results across submission entries and multiple dimensions.
Entry | Type | Subtype | Subset | Genotype | F-score | Recall | Precision | Frac_NA | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Showing results 81951–82000 of 86044 (show all)
raldana-dualsentieon | INDEL | D16_PLUS | map_l100_m2_e1 | hetalt | 90.9091 | 83.3333 | 100.0000 | 74.0385 | 25 | 5 | 27 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 94.7368 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m0_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.0588 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.2857 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m2_e1 | hetalt | 85.7143 | 75.0000 | 100.0000 | 90.0000 | 3 | 1 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l150_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l150_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l150_m2_e1 | hetalt | 66.6667 | 50.0000 | 100.0000 | 95.0000 | 1 | 1 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3684 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3684 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_siren | hetalt | 91.2281 | 83.8710 | 100.0000 | 80.5556 | 26 | 5 | 28 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | segdup | hetalt | 87.5000 | 77.7778 | 100.0000 | 91.8919 | 7 | 2 | 9 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | segdup | homalt | 100.0000 | 100.0000 | 100.0000 | 95.6835 | 12 | 0 | 12 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | * | 100.0000 | 100.0000 | 100.0000 | 42.8571 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | het | 100.0000 | 100.0000 | 100.0000 | 0.0000 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | * | 100.0000 | 100.0000 | 100.0000 | 99.9263 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9456 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.5885 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9339 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | func_cds | * | 100.0000 | 100.0000 | 100.0000 | 38.9313 | 159 | 0 | 160 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | func_cds | het | 100.0000 | 100.0000 | 100.0000 | 44.8718 | 85 | 0 | 86 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | func_cds | homalt | 100.0000 | 100.0000 | 100.0000 | 29.5238 | 74 | 0 | 74 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_51to200bp_gt95identity_merged | hetalt | 92.2212 | 85.5653 | 100.0000 | 31.5699 | 1559 | 263 | 1591 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_gt200bp_gt95identity_merged | * | 95.2381 | 90.9091 | 100.0000 | 98.7406 | 10 | 1 | 10 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_gt200bp_gt95identity_merged | het | 93.3333 | 87.5000 | 100.0000 | 98.5597 | 7 | 1 | 7 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 97.1429 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 99.2674 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_lt51bp_gt95identity_merged | hetalt | 97.4574 | 95.0409 | 100.0000 | 63.3890 | 8835 | 461 | 8854 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331 | hetalt | 96.5598 | 93.3484 | 100.0000 | 31.4834 | 8673 | 618 | 8716 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_51to200bp_gt95identity_merged | hetalt | 89.6000 | 81.1594 | 100.0000 | 56.2500 | 56 | 13 | 56 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 98.6755 | 10 | 0 | 10 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 98.4749 | 7 | 0 | 7 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.6667 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 99.2481 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt101bp_gt95identity_merged | hetalt | 97.9310 | 95.9459 | 100.0000 | 80.6011 | 71 | 3 | 71 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | het | 99.5413 | 99.0868 | 100.0000 | 78.5149 | 651 | 6 | 651 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | hetalt | 98.6301 | 97.2973 | 100.0000 | 87.2340 | 36 | 1 | 36 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | hetalt | 91.9665 | 85.1278 | 100.0000 | 30.7314 | 1099 | 192 | 1127 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt101bp_gt95identity_merged | hetalt | 96.3442 | 92.9463 | 100.0000 | 24.7911 | 7524 | 571 | 7560 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | hetalt | 97.1063 | 94.3753 | 100.0000 | 23.8971 | 6443 | 384 | 6452 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_gt95identity_merged | hetalt | 96.3146 | 92.8912 | 100.0000 | 26.9952 | 7592 | 581 | 7629 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_merged | hetalt | 96.5598 | 93.3484 | 100.0000 | 31.4834 | 8673 | 618 | 8716 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_diTR_11to50 | hetalt | 97.1241 | 94.4089 | 100.0000 | 25.3923 | 6501 | 385 | 6514 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_diTR_51to200 | hetalt | 77.8607 | 63.7475 | 100.0000 | 28.1116 | 313 | 178 | 335 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 98.7685 | 97.5669 | 100.0000 | 70.8696 | 401 | 10 | 402 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_homopolymer_gt10 | * | 100.0000 | 100.0000 | 100.0000 | 99.9991 | 1 | 0 | 1 | 0 | 0 |