PrecisionFDA Truth Challenge
Explore HG002 comparison results
Use this interactive explorer to filter results across all submission entries and multiple dimensions (variant type, subtype, genomic subset, and genotype). The table below shows rows 3151–3200 of the 86,044 total results.
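The same slicing the web filters perform can also be reproduced offline. The sketch below assumes the comparison results have been exported to a local CSV whose columns mirror the table below; the file name `hg002_comparison_results.csv` and the exact column labels are assumptions, not an official export format.

```python
# Minimal sketch of reproducing the explorer's filtering offline with pandas.
# Assumes a local CSV export whose columns mirror the table below; the file
# name and exact column labels are assumptions, not an official format.
import pandas as pd

results = pd.read_csv("hg002_comparison_results.csv")

# Keep the het INDEL rows for one submission entry, then rank subsets by F-score.
subset = results[
    (results["Entry"] == "cchapple-custom")
    & (results["Type"] == "INDEL")
    & (results["Genotype"] == "het")
]
print(
    subset.sort_values("F-score", ascending=False)[
        ["Subset", "F-score", "Recall", "Precision", "Frac_NA"]
    ].head(10)
)
```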
Entry | Type | Subtype | Subset | Genotype | F-score (%) | Recall (%) | Precision (%) | Frac_NA (%) | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cchapple-custom | INDEL | * | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9628 | 6 | 0 | 7 | 0 | 0 | ||
cchapple-custom | INDEL | * | decoy | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 1 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9250 | 3 | 0 | 3 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.3674 | 12 | 0 | 15 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 3 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.4286 | 10 | 0 | 13 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 3 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 97.4790 | 3 | 0 | 3 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 98.0000 | 2 | 0 | 2 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 94.7368 | 1 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_SimpleRepeat_triTR_51to200 | homalt | 94.0000 | 100.0000 | 88.6792 | 36.1446 | 47 | 0 | 47 | 6 | 6 | 100.0000 | |
cchapple-custom | INDEL | * | map_l250_m0_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3795 | 25 | 0 | 25 | 0 | 0 | ||
cchapple-custom | INDEL | * | map_l250_m1_e0 | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 6 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | map_l250_m2_e0 | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 6 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | map_l250_m2_e1 | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 6 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | segdupwithalt | * | 100.0000 | 100.0000 | 100.0000 | 99.9974 | 1 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | * | segdupwithalt | het | 100.0000 | 100.0000 | 100.0000 | 99.9964 | 1 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | * | tech_badpromoters | hetalt | 0.0000 | 100.0000 | 0.0000 | 0.0000 | 4 | 0 | 0 | 0 | 0 | ||
cchapple-custom | INDEL | * | tech_badpromoters | homalt | 100.0000 | 100.0000 | 100.0000 | 57.1429 | 33 | 0 | 33 | 0 | 0 | ||
astatham-gatk | SNP | * | lowcmp_SimpleRepeat_triTR_51to200 | het | 100.0000 | 100.0000 | 100.0000 | 95.0704 | 7 | 0 | 7 | 0 | 0 | ||
astatham-gatk | SNP | * | lowcmp_SimpleRepeat_triTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3333 | 2 | 0 | 2 | 0 | 0 | ||
astatham-gatk | SNP | * | map_l100_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 68.0000 | 16 | 0 | 16 | 0 | 0 | ||
astatham-gatk | SNP | * | map_l125_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 75.6757 | 9 | 0 | 9 | 0 | 0 | ||
astatham-gatk | SNP | * | map_l150_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.2857 | 3 | 0 | 3 | 0 | 0 | ||
astatham-gatk | SNP | * | map_l250_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.7436 | 4 | 0 | 4 | 0 | 0 | ||
astatham-gatk | SNP | * | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.3617 | 5 | 0 | 5 | 0 | 0 | ||
astatham-gatk | SNP | * | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.3617 | 5 | 0 | 5 | 0 | 0 | ||
astatham-gatk | SNP | * | segdup | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.6825 | 7 | 0 | 7 | 0 | 0 | ||
astatham-gatk | SNP | ti | func_cds | hetalt | 100.0000 | 100.0000 | 100.0000 | 46.6667 | 8 | 0 | 8 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331 | hetalt | 100.0000 | 100.0000 | 100.0000 | 83.0986 | 12 | 0 | 12 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt101bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 50.0000 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 50.0000 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt101bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 94.1176 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 93.3333 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 92.0000 | 2 | 0 | 2 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 83.0986 | 12 | 0 | 12 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_diTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 94.7368 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_diTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 96.5116 | 6 | 0 | 6 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 80.0000 | 2 | 0 | 2 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_quadTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 83.3333 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_triTR_11to50 | homalt | 99.9650 | 100.0000 | 99.9300 | 26.8443 | 1427 | 0 | 1427 | 1 | 1 | 100.0000 | |
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | * | 100.0000 | 100.0000 | 100.0000 | 94.5578 | 8 | 0 | 8 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | het | 100.0000 | 100.0000 | 100.0000 | 92.9412 | 6 | 0 | 6 | 0 | 0 | ||
astatham-gatk | SNP | ti | lowcmp_SimpleRepeat_triTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 96.7742 | 2 | 0 | 2 | 0 | 0 | ||
astatham-gatk | SNP | ti | map_l100_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 65.0000 | 14 | 0 | 14 | 0 | 0 | ||
astatham-gatk | SNP | ti | map_l100_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 68.1319 | 29 | 0 | 29 | 0 | 0 | ||
astatham-gatk | SNP | ti | map_l100_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 69.6970 | 30 | 0 | 30 | 0 | 0 | ||
astatham-gatk | SNP | ti | map_l100_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 69.0000 | 31 | 0 | 31 | 0 | 0 | ||
astatham-gatk | SNP | ti | map_l125_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 71.4286 | 8 | 0 | 8 | 0 | 0 | ||
astatham-gatk | SNP | ti | map_l125_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 64.1791 | 24 | 0 | 24 | 0 | 0 |
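For reference, the displayed metrics follow the standard precision/recall definitions computed from the count columns: Recall = Truth TP / (Truth TP + Truth FN), Precision = Query TP / (Query TP + Query FP), and F-score is the harmonic mean of the two, all shown as percentages. The snippet below is a quick sanity check of those definitions against the cchapple-custom INDEL homalt row for lowcmp_SimpleRepeat_triTR_51to200; it is not part of the official scoring code.

```python
# Re-derive the displayed metrics (percent scale) from the count columns of the
# cchapple-custom / INDEL / lowcmp_SimpleRepeat_triTR_51to200 / homalt row:
# Truth TP = 47, Truth FN = 0, Query TP = 47, Query FP = 6.
truth_tp, truth_fn = 47, 0
query_tp, query_fp = 47, 6

recall = 100.0 * truth_tp / (truth_tp + truth_fn)        # 100.0000
precision = 100.0 * query_tp / (query_tp + query_fp)     # 88.6792
f_score = 2 * precision * recall / (precision + recall)  # 94.0000 (harmonic mean)

print(f"Recall {recall:.4f}  Precision {precision:.4f}  F-score {f_score:.4f}")
```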