PrecisionFDA
Truth Challenge
Engage and improve DNA test results with our community challenges
Explore HG002 comparison results
Use this interactive explorer to filter all results across submission entries and multiple dimensions.
Entry | Type | Subtype | Subset | Genotype | F-score | Recall | Precision | Frac_NA | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
Showing results 85051–85100 of 86044 (show all)
raldana-dualsentieon | INDEL | * | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9232 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | func_cds | homalt | 100.0000 | 100.0000 | 100.0000 | 36.3380 | 226 | 0 | 226 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.2500 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.2816 | 10 | 0 | 10 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.7143 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 95.2381 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.8747 | 16 | 0 | 16 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | map_l250_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.0784 | 6 | 0 | 6 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.7391 | 6 | 0 | 6 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.8421 | 6 | 0 | 6 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | segdupwithalt | * | 100.0000 | 100.0000 | 100.0000 | 99.9969 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | segdupwithalt | het | 100.0000 | 100.0000 | 100.0000 | 99.9951 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | tech_badpromoters | hetalt | 100.0000 | 100.0000 | 100.0000 | 50.0000 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | tech_badpromoters | homalt | 100.0000 | 100.0000 | 100.0000 | 57.1429 | 33 | 0 | 33 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | decoy | * | 100.0000 | 100.0000 | 100.0000 | 98.9565 | 6 | 0 | 6 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | decoy | het | 100.0000 | 100.0000 | 100.0000 | 98.9873 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 98.6486 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | func_cds | homalt | 100.0000 | 100.0000 | 100.0000 | 66.6667 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_AllRepeats_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.7361 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 91.6667 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.7283 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt101bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 71.6049 | 19 | 0 | 23 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 46.1538 | 12 | 0 | 14 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 46.9849 | 211 | 0 | 211 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 54.2857 | 12 | 0 | 16 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_SimpleRepeat_homopolymer_6to10 | homalt | 100.0000 | 100.0000 | 100.0000 | 83.3333 | 23 | 0 | 23 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_SimpleRepeat_homopolymer_gt10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 91.8182 | 9 | 0 | 9 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_SimpleRepeat_homopolymer_gt10 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.4533 | 15 | 0 | 15 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | lowcmp_SimpleRepeat_triTR_11to50 | homalt | 100.0000 | 100.0000 | 100.0000 | 63.4146 | 45 | 0 | 45 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l100_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 87.5000 | 4 | 0 | 5 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 94.7368 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m0_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.0588 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.2857 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l125_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l150_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l150_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3684 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3684 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | segdup | homalt | 100.0000 | 100.0000 | 100.0000 | 95.6835 | 12 | 0 | 12 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | * | 100.0000 | 100.0000 | 100.0000 | 42.8571 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | het | 100.0000 | 100.0000 | 100.0000 | 0.0000 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | * | 100.0000 | 100.0000 | 100.0000 | 99.9263 | 4 | 0 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9456 | 2 | 0 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.5885 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9339 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | D1_5 | func_cds | * | 100.0000 | 100.0000 | 100.0000 | 38.9313 | 159 | 0 | 160 | 0 | 0 |