PrecisionFDA
Truth Challenge
Engage and improve DNA test results with our community challenges
Explore HG002 comparison results
Use this interactive explorer to filter all results across submission entries and multiple dimensions.
Entry | Type | Subtype | Subset | Genotype | F-score | Recall | Precision | Frac_NA | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Showing results 5651–5700 of 86,044 (show all)
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_diTR_51to200 | * | 98.0392 | 96.1538 | 100.0000 | 96.7866 | 25 | 1 | 25 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_diTR_51to200 | het | 96.9697 | 94.1176 | 100.0000 | 97.2556 | 16 | 1 | 16 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_diTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 95.3368 | 9 | 0 | 9 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 68.7500 | 5 | 0 | 5 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_quadTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 64.2857 | 5 | 0 | 5 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_quadTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 93.1818 | 6 | 0 | 6 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_triTR_11to50 | homalt | 99.9237 | 99.8474 | 100.0000 | 34.5500 | 1309 | 2 | 1309 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_triTR_51to200 | * | 100.0000 | 100.0000 | 100.0000 | 98.5507 | 1 | 0 | 1 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | lowcmp_SimpleRepeat_triTR_51to200 | het | 100.0000 | 100.0000 | 100.0000 | 98.2143 | 1 | 0 | 1 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l100_m0_e0 | hetalt | 96.7742 | 93.7500 | 100.0000 | 68.7500 | 15 | 1 | 15 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l100_m1_e0 | hetalt | 97.5000 | 95.1220 | 100.0000 | 70.4545 | 39 | 2 | 39 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l100_m2_e0 | hetalt | 97.5610 | 95.2381 | 100.0000 | 72.7891 | 40 | 2 | 40 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l100_m2_e1 | hetalt | 97.6190 | 95.3488 | 100.0000 | 72.2973 | 41 | 2 | 41 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l125_m0_e0 | hetalt | 94.1176 | 88.8889 | 100.0000 | 77.1429 | 8 | 1 | 8 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l125_m1_e0 | hetalt | 96.5517 | 93.3333 | 100.0000 | 71.4286 | 28 | 2 | 28 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l125_m2_e0 | hetalt | 96.5517 | 93.3333 | 100.0000 | 76.0684 | 28 | 2 | 28 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l125_m2_e1 | hetalt | 96.5517 | 93.3333 | 100.0000 | 76.0684 | 28 | 2 | 28 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l150_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 88.8889 | 3 | 0 | 3 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l150_m1_e0 | hetalt | 94.7368 | 90.0000 | 100.0000 | 76.3158 | 18 | 2 | 18 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l150_m2_e0 | hetalt | 94.7368 | 90.0000 | 100.0000 | 80.0000 | 18 | 2 | 18 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l150_m2_e1 | hetalt | 94.7368 | 90.0000 | 100.0000 | 80.0000 | 18 | 2 | 18 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l250_m1_e0 | hetalt | 85.7143 | 75.0000 | 100.0000 | 91.8919 | 3 | 1 | 3 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l250_m2_e0 | hetalt | 88.8889 | 80.0000 | 100.0000 | 90.9091 | 4 | 1 | 4 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_l250_m2_e1 | hetalt | 88.8889 | 80.0000 | 100.0000 | 90.9091 | 4 | 1 | 4 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | map_siren | hetalt | 98.1132 | 96.2963 | 100.0000 | 69.5312 | 78 | 3 | 78 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | segdup | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.6667 | 7 | 0 | 7 | 0 | 0 | ||
bgallagher-sentieon | SNP | tv | tech_badpromoters | het | 98.4615 | 96.9697 | 100.0000 | 55.5556 | 32 | 1 | 32 | 0 | 0 | ||
cchapple-custom | INDEL | * | decoy | * | 100.0000 | 100.0000 | 100.0000 | 99.9561 | 10 | 0 | 10 | 0 | 0 | ||
cchapple-custom | INDEL | * | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9628 | 6 | 0 | 7 | 0 | 0 | ||
cchapple-custom | INDEL | * | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9250 | 3 | 0 | 3 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | * | 94.7368 | 90.0000 | 100.0000 | 99.4547 | 18 | 2 | 18 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.3674 | 12 | 0 | 15 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | homalt | 75.0000 | 60.0000 | 100.0000 | 99.6774 | 3 | 2 | 3 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | * | 93.7500 | 88.2353 | 100.0000 | 99.5292 | 15 | 2 | 15 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.4286 | 10 | 0 | 13 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | homalt | 66.6667 | 50.0000 | 100.0000 | 99.7805 | 2 | 2 | 2 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 97.4790 | 3 | 0 | 3 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 98.0000 | 2 | 0 | 2 | 0 | 0 | ||
cchapple-custom | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 94.7368 | 1 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | * | map_l250_m0_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3795 | 25 | 0 | 25 | 0 | 0 | ||
cchapple-custom | INDEL | * | segdupwithalt | * | 100.0000 | 100.0000 | 100.0000 | 99.9974 | 1 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | * | segdupwithalt | het | 100.0000 | 100.0000 | 100.0000 | 99.9964 | 1 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | * | tech_badpromoters | * | 99.3377 | 98.6842 | 100.0000 | 54.0698 | 75 | 1 | 79 | 0 | 0 | ||
cchapple-custom | INDEL | * | tech_badpromoters | het | 98.7013 | 97.4359 | 100.0000 | 51.5789 | 38 | 1 | 46 | 0 | 0 | ||
cchapple-custom | INDEL | * | tech_badpromoters | homalt | 100.0000 | 100.0000 | 100.0000 | 57.1429 | 33 | 0 | 33 | 0 | 0 | ||
cchapple-custom | INDEL | C16_PLUS | * | homalt | 0.0000 | 0.0000 | 100.0000 | 95.1100 | 0 | 0 | 20 | 0 | 0 | ||
cchapple-custom | INDEL | C16_PLUS | HG002complexvar | homalt | 0.0000 | 0.0000 | 100.0000 | 88.0952 | 0 | 0 | 20 | 0 | 0 | ||
cchapple-custom | INDEL | C16_PLUS | lowcmp_AllRepeats_51to200bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 98.7179 | 0 | 0 | 1 | 0 | 0 | ||
cchapple-custom | INDEL | C16_PLUS | lowcmp_AllRepeats_lt51bp_gt95identity_merged | homalt | 0.0000 | 0.0000 | 100.0000 | 96.9136 | 0 | 0 | 5 | 0 | 0 | ||
cchapple-custom | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331 | homalt | 0.0000 | 0.0000 | 100.0000 | 97.1963 | 0 | 0 | 6 | 0 | 0 |