PrecisionFDA Truth Challenge
Engage with our community challenges and help improve DNA test results
Explore HG002 comparison results
Use this interactive explorer to filter the full set of results by submission entry and across multiple dimensions (variant type, subtype, subset, and genotype).
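The same filtering can also be reproduced offline with a few lines of pandas. The sketch below is a minimal example under stated assumptions: the file name hg002_results.csv and the exact column labels are illustrative (based on the table layout shown below), not confirmed by this page, so adjust both to match the actual export.

```python
import pandas as pd

# Load an exported copy of the HG002 comparison results.
# "hg002_results.csv" and the column labels are assumptions based on the
# table layout shown below; adjust both to match the actual export.
results = pd.read_csv("hg002_results.csv")

# Example filter: SNP results for one entry, restricted to the het genotype
# stratification, with the best F-scores listed first.
subset = results[
    (results["Entry"] == "astatham-gatk")
    & (results["Type"] == "SNP")
    & (results["Genotype"] == "het")
].sort_values("F-score", ascending=False)

print(subset[["Subset", "F-score", "Recall", "Precision"]].head())
```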
Rows 74201-74250 of 86044 are shown below.

Entry | Type | Subtype | Subset | Genotype | F-score (%) | Recall (%) | Precision (%) | Frac_NA (%) | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
astatham-gatk | SNP | tv | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_51to200bp_gt95identity_merged | homalt | 98.7654 | 97.5610 | 100.0000 | 91.8699 | 40 | 1 | 40 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt101bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 90.9091 | 2 | 0 | 2 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_lt51bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.4737 | 2 | 0 | 2 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 92.6829 | 3 | 0 | 3 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_Human_Full_Genome_TRDB_hg19_150331_all_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 86.6071 | 15 | 0 | 15 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_diTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.6522 | 1 | 0 | 1 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_diTR_51to200 | * | 98.0392 | 96.1538 | 100.0000 | 96.8153 | 25 | 1 | 25 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_diTR_51to200 | het | 96.9697 | 94.1176 | 100.0000 | 97.2835 | 16 | 1 | 16 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_diTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 95.3608 | 9 | 0 | 9 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 68.7500 | 5 | 0 | 5 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_quadTR_11to50 | hetalt | 100.0000 | 100.0000 | 100.0000 | 64.2857 | 5 | 0 | 5 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_quadTR_11to50 | homalt | 99.8911 | 99.7825 | 100.0000 | 36.2077 | 2752 | 6 | 2752 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_quadTR_51to200 | homalt | 100.0000 | 100.0000 | 100.0000 | 93.4066 | 6 | 0 | 6 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_triTR_11to50 | homalt | 99.7323 | 99.4661 | 100.0000 | 34.5710 | 1304 | 7 | 1304 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_triTR_51to200 | * | 100.0000 | 100.0000 | 100.0000 | 98.5714 | 1 | 0 | 1 | 0 | 0 | |
astatham-gatk | SNP | tv | lowcmp_SimpleRepeat_triTR_51to200 | het | 100.0000 | 100.0000 | 100.0000 | 98.2456 | 1 | 0 | 1 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l100_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 68.0000 | 16 | 0 | 16 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l100_m1_e0 | hetalt | 98.7654 | 97.5610 | 100.0000 | 69.9248 | 40 | 1 | 40 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l100_m2_e0 | hetalt | 98.7952 | 97.6190 | 100.0000 | 72.2973 | 41 | 1 | 41 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l100_m2_e1 | hetalt | 98.8235 | 97.6744 | 100.0000 | 71.8121 | 42 | 1 | 42 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l125_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 75.6757 | 9 | 0 | 9 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l125_m1_e0 | hetalt | 98.3051 | 96.6667 | 100.0000 | 71.0000 | 29 | 1 | 29 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l125_m2_e0 | hetalt | 98.3051 | 96.6667 | 100.0000 | 75.6303 | 29 | 1 | 29 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l125_m2_e1 | hetalt | 98.3051 | 96.6667 | 100.0000 | 75.6303 | 29 | 1 | 29 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l150_m0_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.2857 | 3 | 0 | 3 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l150_m1_e0 | hetalt | 97.4359 | 95.0000 | 100.0000 | 75.9494 | 19 | 1 | 19 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l150_m2_e0 | hetalt | 97.4359 | 95.0000 | 100.0000 | 79.5699 | 19 | 1 | 19 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l150_m2_e1 | hetalt | 97.4359 | 95.0000 | 100.0000 | 79.5699 | 19 | 1 | 19 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l250_m1_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.7436 | 4 | 0 | 4 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l250_m2_e0 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.3617 | 5 | 0 | 5 | 0 | 0 | |
astatham-gatk | SNP | tv | map_l250_m2_e1 | hetalt | 100.0000 | 100.0000 | 100.0000 | 89.3617 | 5 | 0 | 5 | 0 | 0 | |
astatham-gatk | SNP | tv | map_siren | hetalt | 98.7500 | 97.5309 | 100.0000 | 69.1406 | 79 | 2 | 79 | 0 | 0 | |
astatham-gatk | SNP | tv | segdup | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.6825 | 7 | 0 | 7 | 0 | 0 | |
astatham-gatk | SNP | tv | tech_badpromoters | het | 96.8750 | 93.9394 | 100.0000 | 55.7143 | 31 | 2 | 31 | 0 | 0 | |
asubramanian-gatk | INDEL | * | decoy | * | 100.0000 | 100.0000 | 100.0000 | 99.9865 | 10 | 0 | 10 | 0 | 0 | |
asubramanian-gatk | INDEL | * | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9667 | 6 | 0 | 6 | 0 | 0 | |
asubramanian-gatk | INDEL | * | decoy | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.8285 | 1 | 0 | 1 | 0 | 0 | |
asubramanian-gatk | INDEL | * | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9307 | 3 | 0 | 3 | 0 | 0 | |
asubramanian-gatk | INDEL | * | func_cds | hetalt | 88.8889 | 80.0000 | 100.0000 | 63.6364 | 4 | 1 | 4 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 99.7412 | 20 | 0 | 21 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.4015 | 12 | 0 | 13 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.9697 | 3 | 0 | 3 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 99.5238 | 5 | 0 | 5 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 99.7732 | 17 | 0 | 18 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.4714 | 10 | 0 | 11 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.5909 | 3 | 0 | 3 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 99.6090 | 4 | 0 | 4 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 100.0000 | 100.0000 | 100.0000 | 98.3516 | 3 | 0 | 3 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 97.8495 | 2 | 0 | 2 | 0 | 0 | |
asubramanian-gatk | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 96.2963 | 1 | 0 | 1 | 0 | 0 | |
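For readers checking these numbers offline, the metric columns are consistent with the usual benchmarking definitions: Recall = Truth TP / (Truth TP + Truth FN), Precision = Query TP / (Query TP + Query FP), and F-score as the harmonic mean of the two, all reported as percentages. The short sketch below verifies this against the first astatham-gatk row of the table; the four counts are copied from that row, and nothing beyond these standard formulas is assumed.

```python
# Sanity check of the metric columns against the first astatham-gatk row above:
# Truth TP = 40, Truth FN = 1, Query TP = 40, Query FP = 0.
truth_tp, truth_fn = 40, 1
query_tp, query_fp = 40, 0

recall = 100 * truth_tp / (truth_tp + truth_fn)           # 97.5610
precision = 100 * query_tp / (query_tp + query_fp)        # 100.0000
f_score = 2 * recall * precision / (recall + precision)   # 98.7654

print(f"Recall={recall:.4f}  Precision={precision:.4f}  F-score={f_score:.4f}")
```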