PrecisionFDA
Truth Challenge
Engage and improve DNA test results with our community challenges
Explore HG002 comparison results
Use this interactive explorer to filter all results across submission entries and multiple dimensions.
| Entry | Type | Subtype | Subset | Genotype | F-score | Recall | Precision | Frac_NA | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Showing results 48551–48600 of 86044 (show all)
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m1_e0 | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | * | 83.3333 | 100.0000 | 71.4286 | 95.3020 | 5 | 0 | 5 | 2 | 0 | 0.0000 | |
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | het | 75.0000 | 100.0000 | 60.0000 | 95.0495 | 3 | 0 | 3 | 2 | 0 | 0.0000 | |
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e0 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3684 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | * | 83.3333 | 100.0000 | 71.4286 | 95.3333 | 5 | 0 | 5 | 2 | 0 | 0.0000 | |
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | het | 75.0000 | 100.0000 | 60.0000 | 95.0980 | 3 | 0 | 3 | 2 | 0 | 0.0000 | |
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.0000 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | map_l250_m2_e1 | homalt | 100.0000 | 100.0000 | 100.0000 | 97.3684 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | segdup | homalt | 100.0000 | 100.0000 | 100.0000 | 95.6835 | 12 | 0 | 12 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | segdupwithalt | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D16_PLUS | segdupwithalt | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D16_PLUS | segdupwithalt | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D16_PLUS | segdupwithalt | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | * | 100.0000 | 100.0000 | 100.0000 | 42.8571 | 4 | 0 | 4 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | het | 100.0000 | 100.0000 | 100.0000 | 0.0000 | 4 | 0 | 4 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D16_PLUS | tech_badpromoters | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | decoy | * | 100.0000 | 100.0000 | 100.0000 | 99.9263 | 4 | 0 | 4 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9456 | 2 | 0 | 2 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | decoy | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.5885 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9339 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | func_cds | * | 100.0000 | 100.0000 | 100.0000 | 38.9313 | 159 | 0 | 160 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | func_cds | het | 100.0000 | 100.0000 | 100.0000 | 44.8718 | 85 | 0 | 86 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | func_cds | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | func_cds | homalt | 100.0000 | 100.0000 | 100.0000 | 29.5238 | 74 | 0 | 74 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 97.1429 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_AllRepeats_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 99.2674 | 2 | 0 | 2 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 98.6755 | 10 | 0 | 10 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 98.4749 | 7 | 0 | 7 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.6667 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 99.2481 | 2 | 0 | 2 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | homalt | 99.8647 | 100.0000 | 99.7297 | 81.5645 | 369 | 0 | 369 | 1 | 1 | 100.0000 | |
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_diTR_gt200 | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_diTR_gt200 | het | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_diTR_gt200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_diTR_gt200 | homalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_homopolymer_gt10 | * | 100.0000 | 100.0000 | 100.0000 | 99.9991 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_homopolymer_gt10 | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_homopolymer_gt10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.9914 | 1 | 0 | 1 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_homopolymer_gt10 | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_quadTR_11to50 | homalt | 99.8638 | 100.0000 | 99.7280 | 50.5648 | 3666 | 0 | 3666 | 10 | 10 | 100.0000 | |
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_quadTR_gt200 | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_quadTR_gt200 | het | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_quadTR_gt200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_quadTR_gt200 | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_triTR_11to50 | homalt | 100.0000 | 100.0000 | 100.0000 | 40.5454 | 1330 | 0 | 1330 | 0 | 0 | ||
| raldana-dualsentieon | INDEL | D1_5 | lowcmp_SimpleRepeat_triTR_51to200 | homalt | 92.8571 | 100.0000 | 86.6667 | 57.1429 | 13 | 0 | 13 | 2 | 2 | 100.0000 | |