PrecisionFDA
Truth Challenge
Engage and improve DNA test results with our community challenges
Explore HG002 comparison results
Use this interactive explorer to filter the full set of results by submission entry and by multiple benchmarking dimensions (variant type, subtype, region subset, and genotype).
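The percentage metrics in each row follow directly from the raw count columns: Recall = Truth TP / (Truth TP + Truth FN), Precision = Query TP / (Query TP + Query FP), F-score is the harmonic mean of the two, and, in the rows shown here, the final column equals FP gt as a percentage of Query FP. The short sketch below only illustrates those relationships, using counts copied from the first data row of the table; it is not part of the precisionFDA tooling, and the variable names are our own.

```python
# Minimal sketch (not part of the precisionFDA tooling): recompute a row's
# summary metrics from its raw count columns. The counts are copied from the
# first data row shown below; the variable names are illustrative.

truth_tp, truth_fn = 5683, 400   # "Truth TP", "Truth FN"
query_tp, query_fp = 5564, 314   # "Query TP", "Query FP"
fp_gt = 298                      # "FP gt" (a sub-count of Query FP)

recall = truth_tp / (truth_tp + truth_fn)                # ~0.934243 -> 93.4243
precision = query_tp / (query_tp + query_fp)             # ~0.946580 -> 94.6580
f_score = 2 * precision * recall / (precision + recall)  # ~0.94037  -> 94.037
pct_fp_gt = fp_gt / query_fp                             # ~0.949045 -> 94.9045

print(f"Recall    {recall:.4%}")     # matches the Recall column
print(f"Precision {precision:.4%}")  # matches the Precision column
print(f"F-score   {f_score:.4%}")    # matches the F-score column (to rounding)
print(f"% FP ma   {pct_fp_gt:.4%}")  # matches the "% FP ma" column
```

With the table exported to CSV, the same arithmetic can be applied per Entry, Type, or Subset (for example with a pandas groupby) to cross-check or re-aggregate the reported values, assuming such an export is available.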
Rows 651-700 of 86,044 results are shown below.

Entry | Type | Subtype | Subset | Genotype | F-score | Recall | Precision | Frac_NA | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
astatham-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | * | 94.0371 | 93.4243 | 94.6580 | 64.2218 | 5683 | 400 | 5564 | 314 | 298 | 94.9045 | |
astatham-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 97.7612 | 3 | 0 | 3 | 0 | 0 | ||
astatham-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt101bp_gt95identity_merged | * | 97.6838 | 97.4223 | 97.9467 | 56.1751 | 36925 | 977 | 36731 | 770 | 739 | 95.9740 | |
astatham-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | * | 98.3493 | 98.1589 | 98.5404 | 55.2323 | 31403 | 589 | 31326 | 464 | 446 | 96.1207 | |
astatham-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_gt95identity_merged | * | 97.8510 | 97.6024 | 98.1008 | 61.8066 | 42296 | 1039 | 42098 | 815 | 759 | 93.1288 | |
astatham-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_merged | * | 98.2824 | 98.0398 | 98.5261 | 67.2886 | 64070 | 1281 | 63840 | 955 | 849 | 88.9005 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_diTR_11to50 | * | 98.5463 | 98.3740 | 98.7193 | 52.5147 | 35997 | 595 | 35921 | 466 | 435 | 93.3476 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_diTR_51to200 | * | 85.8405 | 84.9119 | 86.7896 | 57.5804 | 1784 | 317 | 1695 | 258 | 252 | 97.6744 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_homopolymer_6to10 | * | 99.8087 | 99.7134 | 99.9043 | 58.1701 | 28179 | 81 | 28181 | 27 | 15 | 55.5556 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | * | 94.7563 | 95.1613 | 94.3548 | 99.9176 | 118 | 6 | 117 | 7 | 0 | 0.0000 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_quadTR_11to50 | * | 99.5288 | 99.4462 | 99.6115 | 59.5139 | 19752 | 110 | 19745 | 77 | 49 | 63.6364 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_quadTR_51to200 | * | 97.6175 | 97.2128 | 98.0256 | 69.1545 | 2581 | 74 | 2532 | 51 | 39 | 76.4706 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_quadTR_gt200 | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_triTR_11to50 | * | 99.7622 | 99.6881 | 99.8364 | 49.1224 | 6712 | 21 | 6714 | 11 | 6 | 54.5455 | |
astatham-gatk | INDEL | * | lowcmp_SimpleRepeat_triTR_51to200 | * | 96.1232 | 95.4955 | 96.7593 | 64.9351 | 212 | 10 | 209 | 7 | 4 | 57.1429 | |
astatham-gatk | INDEL | * | map_l100_m0_e0 | * | 96.7251 | 96.2892 | 97.1649 | 87.6728 | 1505 | 58 | 1508 | 44 | 9 | 20.4545 | |
astatham-gatk | INDEL | * | map_l100_m1_e0 | * | 96.5907 | 95.1478 | 98.0780 | 85.9214 | 3412 | 174 | 3419 | 67 | 17 | 25.3731 | |
astatham-gatk | INDEL | * | map_l100_m2_e0 | * | 96.5801 | 95.1530 | 98.0507 | 86.7138 | 3514 | 179 | 3521 | 70 | 18 | 25.7143 | |
astatham-gatk | INDEL | * | map_l100_m2_e1 | * | 96.5544 | 95.0745 | 98.0811 | 86.7793 | 3571 | 185 | 3578 | 70 | 18 | 25.7143 | |
astatham-gatk | INDEL | * | map_l125_m0_e0 | * | 96.6572 | 96.5986 | 96.7157 | 90.5095 | 852 | 30 | 854 | 29 | 6 | 20.6897 | |
astatham-gatk | INDEL | * | map_l125_m1_e0 | * | 96.6598 | 95.3963 | 97.9572 | 88.3361 | 2010 | 97 | 2014 | 42 | 9 | 21.4286 | |
astatham-gatk | INDEL | * | map_l125_m2_e0 | * | 96.5138 | 95.1275 | 97.9410 | 89.1008 | 2089 | 107 | 2093 | 44 | 9 | 20.4545 | |
astatham-gatk | INDEL | * | map_l125_m2_e1 | * | 96.4891 | 95.0562 | 97.9658 | 89.1866 | 2115 | 110 | 2119 | 44 | 9 | 20.4545 | |
astatham-gatk | INDEL | * | map_l150_m0_e0 | * | 96.2251 | 96.4981 | 95.9538 | 92.9541 | 496 | 18 | 498 | 21 | 4 | 19.0476 | |
astatham-gatk | INDEL | * | map_l150_m1_e0 | * | 96.6569 | 96.0389 | 97.2830 | 90.5512 | 1285 | 53 | 1289 | 36 | 7 | 19.4444 | |
astatham-gatk | INDEL | * | map_l150_m2_e0 | * | 96.6049 | 95.8807 | 97.3400 | 91.1929 | 1350 | 58 | 1354 | 37 | 7 | 18.9189 | |
astatham-gatk | INDEL | * | map_l150_m2_e1 | * | 96.4999 | 95.6915 | 97.3221 | 91.2120 | 1377 | 62 | 1381 | 38 | 8 | 21.0526 | |
astatham-gatk | INDEL | * | map_l250_m0_e0 | * | 90.3614 | 96.1538 | 85.2273 | 97.7873 | 75 | 3 | 75 | 13 | 2 | 15.3846 | |
astatham-gatk | INDEL | * | map_l250_m1_e0 | * | 95.1613 | 96.7213 | 93.6508 | 96.0377 | 295 | 10 | 295 | 20 | 4 | 20.0000 | |
astatham-gatk | INDEL | * | map_l250_m2_e0 | * | 95.3800 | 96.6767 | 94.1176 | 96.2801 | 320 | 11 | 320 | 20 | 4 | 20.0000 | |
astatham-gatk | INDEL | * | map_l250_m2_e1 | * | 95.4074 | 96.6967 | 94.1520 | 96.3590 | 322 | 11 | 322 | 20 | 4 | 20.0000 | |
astatham-gatk | INDEL | * | map_siren | * | 97.4708 | 96.1673 | 98.8100 | 83.5327 | 7126 | 284 | 7141 | 86 | 20 | 23.2558 | |
astatham-gatk | INDEL | * | segdup | * | 98.7115 | 98.8654 | 98.5581 | 94.6872 | 2527 | 29 | 2529 | 37 | 10 | 27.0270 | |
astatham-gatk | INDEL | * | segdupwithalt | * | 100.0000 | 100.0000 | 100.0000 | 99.9974 | 1 | 0 | 1 | 0 | 0 | ||
astatham-gatk | INDEL | * | tech_badpromoters | * | 99.3377 | 98.6842 | 100.0000 | 55.0898 | 75 | 1 | 75 | 0 | 0 | ||
astatham-gatk | INDEL | C16_PLUS | * | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | HG002complexvar | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | HG002compoundhet | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | decoy | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | func_cds | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_AllRepeats_51to200bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_AllRepeats_gt200bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_AllRepeats_lt51bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331 | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_51to200bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt101bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
astatham-gatk | INDEL | C16_PLUS | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 |