PrecisionFDA
Truth Challenge
Engage and improve DNA test results with our community challenges
Explore HG002 comparison results
Use this interactive explorer to filter the full set of results by submission entry and across multiple dimensions (variant type, subtype, benchmark subset, and genotype).
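The F-score, Recall, and Precision columns are consistent with the standard definitions computed from the Truth TP, Truth FN, Query TP, and Query FP counts shown alongside them; the short Python sketch below (not part of the original page) recomputes them for the first displayed row as a sanity check. Frac_NA depends on calls in unknown regions that are not listed here, so it cannot be rechecked from these columns alone.

```python
# Minimal sketch: recompute Recall, Precision, and F-score from the count columns,
# assuming the standard definitions
#   Recall    = Truth TP / (Truth TP + Truth FN)
#   Precision = Query TP / (Query TP + Query FP)
#   F-score   = harmonic mean of Precision and Recall.
def metrics(truth_tp: int, truth_fn: int, query_tp: int, query_fp: int):
    recall = truth_tp / (truth_tp + truth_fn) if truth_tp + truth_fn else float("nan")
    precision = query_tp / (query_tp + query_fp) if query_tp + query_fp else float("nan")
    f_score = (2 * precision * recall / (precision + recall)
               if precision + recall else float("nan"))
    return 100 * recall, 100 * precision, 100 * f_score

# First displayed row (raldana-dualsentieon, SNP / ti, genotype "*"):
# Truth TP = 2425, Truth FN = 14, Query TP = 2425, Query FP = 4
recall, precision, f_score = metrics(2425, 14, 2425, 4)
print(f"Recall={recall:.4f}  Precision={precision:.4f}  F-score={f_score:.4f}")
# -> Recall=99.4260  Precision=99.8353  F-score=99.6302, matching the table.
```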
Entry | Type | Subtype | Subset | Genotype | F-score | Recall | Precision | Frac_NA | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
Showing rows 85151-85200 of 86044.
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | * | 99.6302 | 99.4260 | 99.8353 | 48.5599 | 2425 | 14 | 2425 | 4 | 0 | 0.0000 | |
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | het | 99.4286 | 99.1139 | 99.7452 | 49.4364 | 1566 | 14 | 1566 | 4 | 0 | 0.0000 | |
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 50.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 46.8731 | 858 | 0 | 858 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | * | 92.7536 | 86.8778 | 99.4819 | 90.7523 | 192 | 29 | 192 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | het | 90.3226 | 82.8947 | 99.2126 | 91.1560 | 126 | 26 | 126 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | homalt | 97.7778 | 95.6522 | 100.0000 | 89.8305 | 66 | 3 | 66 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt101bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 94.1176 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 93.3333 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 91.6667 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 81.2500 | 12 | 0 | 12 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_diTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 94.1176 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_diTR_51to200 | * | 89.6552 | 81.2500 | 100.0000 | 97.7816 | 13 | 3 | 13 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_diTR_51to200 | het | 82.3529 | 70.0000 | 100.0000 | 98.2544 | 7 | 3 | 7 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_diTR_51to200 | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_diTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 96.7391 | 6 | 0 | 6 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 77.7778 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_homopolymer_gt10 | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_homopolymer_gt10 | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_homopolymer_gt10 | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_homopolymer_gt10 | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 83.3333 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_51to200 | * | 90.2174 | 82.1782 | 100.0000 | 93.4543 | 83 | 18 | 83 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_51to200 | het | 86.2069 | 75.7576 | 100.0000 | 94.2661 | 50 | 16 | 50 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_51to200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_51to200 | homalt | 97.0588 | 94.2857 | 100.0000 | 91.6667 | 33 | 2 | 33 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_gt200 | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_gt200 | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_gt200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_quadTR_gt200 | homalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_triTR_11to50 | het | 99.6360 | 99.4350 | 99.8379 | 27.9708 | 2464 | 14 | 2463 | 4 | 0 | 0.0000 | |
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_triTR_11to50 | hetalt | 0.0000 | 100.0000 | 0 | 1 | 0 | 0 | 0 | ||||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | * | 93.3333 | 87.5000 | 100.0000 | 94.4882 | 7 | 1 | 7 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | het | 90.9091 | 83.3333 | 100.0000 | 94.1860 | 5 | 1 | 5 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 95.1220 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l125_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 69.2308 | 8 | 0 | 8 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l125_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 58.6207 | 24 | 0 | 24 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l125_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 65.2174 | 24 | 0 | 24 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l125_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 65.7143 | 24 | 0 | 24 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l150_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 83.3333 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l150_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 66.6667 | 15 | 0 | 15 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l150_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 70.5882 | 15 | 0 | 15 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l150_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 71.1538 | 15 | 0 | 15 | 0 | 0 | ||
raldana-dualsentieon | SNP | ti | map_l250_m0_e0 | het | 96.7570 | 97.4304 | 96.0929 | 92.6538 | 910 | 24 | 910 | 37 | 0 | 0.0000 |
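For working with these results offline, the same multi-dimensional filtering the explorer offers can be reproduced on an exported table. The sketch below is illustrative only: the file name `hg002_results.csv` and the exact column labels are assumptions mirroring the header above, not part of the precisionFDA page.

```python
# Illustrative sketch: filter an exported results table across the same
# dimensions as the interactive explorer, then rank subsets by F-score.
import pandas as pd

df = pd.read_csv("hg002_results.csv")  # assumed local export of the results table

# Keep one submission entry, SNP transitions, and the aggregate "*" genotype rows.
subset = df[
    (df["Entry"] == "raldana-dualsentieon")
    & (df["Type"] == "SNP")
    & (df["Subtype"] == "ti")
    & (df["Genotype"] == "*")
].sort_values("F-score", ascending=False)

print(subset[["Subset", "F-score", "Recall", "Precision", "Frac_NA"]].head(10))
```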