PrecisionFDA Truth Challenge
Explore HG002 comparison results
Use this interactive explorer to filter results across submission entries and multiple dimensions. The excerpt below shows result rows 74401-74450 of 86044.
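The F-score, Recall, and Precision columns appear to follow the standard definitions computed from the count columns (Truth TP, Truth FN, Query TP, Query FP), reported as percentages. The sketch below is a minimal illustration of that relationship, checked against the first row shown; the function names are illustrative and are not part of the challenge tooling.

```python
def recall_pct(truth_tp: int, truth_fn: int) -> float:
    """Recall as a percentage: share of truth variants that were recovered."""
    return 100.0 * truth_tp / (truth_tp + truth_fn)

def precision_pct(query_tp: int, query_fp: int) -> float:
    """Precision as a percentage: share of query calls that are correct."""
    return 100.0 * query_tp / (query_tp + query_fp)

def f_score_pct(recall: float, precision: float) -> float:
    """F-score as the harmonic mean of recall and precision, in percent."""
    return 2.0 * recall * precision / (recall + precision)

# Check against the first row below
# (qzeng-custom, SNP, tv, map_l250_m1_e0, hetalt: Truth TP=1, Truth FN=3, Query TP=1, Query FP=0):
r = recall_pct(1, 3)       # 25.0
p = precision_pct(1, 0)    # 100.0
f = f_score_pct(r, p)      # 40.0
print(f"recall={r:.4f} precision={p:.4f} f-score={f:.4f}")
```

Where Truth TP + Truth FN or Query TP + Query FP is zero, these ratios are undefined, which is consistent with the blank metric cells in rows whose count columns are all zero.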
Entry | Type | Subtype | Subset | Genotype | F-score (%) | Recall (%) | Precision (%) | Frac_NA (%) | Truth TP | Truth FN | Query TP | Query FP | FP gt | % FP ma
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
qzeng-custom | SNP | tv | map_l250_m1_e0 | hetalt | 40.0000 | 25.0000 | 100.0000 | 99.0991 | 1 | 3 | 1 | 0 | 0 | ||
qzeng-custom | SNP | tv | map_l250_m2_e0 | hetalt | 57.1429 | 40.0000 | 100.0000 | 98.3740 | 2 | 3 | 2 | 0 | 0 | ||
qzeng-custom | SNP | tv | map_l250_m2_e1 | hetalt | 57.1429 | 40.0000 | 100.0000 | 98.3871 | 2 | 3 | 2 | 0 | 0 | ||
qzeng-custom | SNP | tv | map_siren | hetalt | 86.0912 | 76.5432 | 98.3607 | 83.9474 | 62 | 19 | 60 | 1 | 0 | 0.0000 | |
qzeng-custom | SNP | tv | segdup | hetalt | 100.0000 | 100.0000 | 100.0000 | 97.9228 | 7 | 0 | 7 | 0 | 0 | ||
qzeng-custom | SNP | tv | segdupwithalt | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
qzeng-custom | SNP | tv | segdupwithalt | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
qzeng-custom | SNP | tv | segdupwithalt | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
qzeng-custom | SNP | tv | segdupwithalt | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
qzeng-custom | SNP | tv | tech_badpromoters | het | 91.4286 | 96.9697 | 86.4865 | 51.9481 | 32 | 1 | 32 | 5 | 0 | 0.0000 | |
qzeng-custom | SNP | tv | tech_badpromoters | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | INDEL | * | decoy | * | 100.0000 | 100.0000 | 100.0000 | 99.9200 | 10 | 0 | 10 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | decoy | het | 100.0000 | 100.0000 | 100.0000 | 99.9259 | 6 | 0 | 6 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | decoy | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.8020 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | decoy | homalt | 100.0000 | 100.0000 | 100.0000 | 99.9232 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | func_cds | * | 99.2118 | 98.8764 | 99.5495 | 41.6557 | 440 | 5 | 442 | 2 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | func_cds | het | 98.5959 | 98.1308 | 99.0654 | 45.9596 | 210 | 4 | 212 | 2 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | func_cds | hetalt | 88.8889 | 80.0000 | 100.0000 | 60.0000 | 4 | 1 | 4 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | func_cds | homalt | 100.0000 | 100.0000 | 100.0000 | 36.3380 | 226 | 0 | 226 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | * | 92.3077 | 90.0000 | 94.7368 | 99.2146 | 18 | 2 | 18 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | het | 95.6522 | 91.6667 | 100.0000 | 99.2450 | 11 | 1 | 11 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 96.2500 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_AllRepeats_gt200bp_gt95identity_merged | homalt | 80.0000 | 80.0000 | 80.0000 | 99.4331 | 4 | 1 | 4 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_51to200bp_gt95identity_merged | hetalt | 90.1316 | 82.0359 | 100.0000 | 68.2540 | 137 | 30 | 140 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | * | 94.1176 | 94.1176 | 94.1176 | 99.2682 | 16 | 1 | 16 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | het | 100.0000 | 100.0000 | 100.0000 | 99.2816 | 10 | 0 | 10 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | hetalt | 100.0000 | 100.0000 | 100.0000 | 95.7143 | 3 | 0 | 3 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_gt200bp_gt95identity_merged | homalt | 75.0000 | 75.0000 | 75.0000 | 99.5354 | 3 | 1 | 3 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt101bp_gt95identity_merged | hetalt | 96.6741 | 93.5622 | 100.0000 | 75.4425 | 218 | 15 | 222 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRgt6_lt51bp_gt95identity_merged | hetalt | 97.6744 | 95.4545 | 100.0000 | 78.8079 | 126 | 6 | 128 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | * | 80.0000 | 66.6667 | 100.0000 | 97.9167 | 2 | 1 | 2 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | het | 66.6667 | 50.0000 | 100.0000 | 98.4615 | 1 | 1 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | hetalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | INDEL | * | lowcmp_Human_Full_Genome_TRDB_hg19_150331_TRlt7_gt200bp_gt95identity_merged | homalt | 100.0000 | 100.0000 | 100.0000 | 95.2381 | 1 | 0 | 1 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_diTR_51to200 | hetalt | 82.8264 | 70.6869 | 100.0000 | 30.6648 | 885 | 367 | 970 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_diTR_gt200 | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_diTR_gt200 | het | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_diTR_gt200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_diTR_gt200 | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_homopolymer_6to10 | hetalt | 97.6077 | 95.3271 | 100.0000 | 72.7898 | 510 | 25 | 514 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | * | 87.8883 | 79.0323 | 98.9796 | 99.9286 | 98 | 26 | 97 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | het | 82.4324 | 70.1149 | 100.0000 | 99.9146 | 61 | 26 | 60 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | hetalt | 100.0000 | 100.0000 | 100.0000 | 99.8747 | 16 | 0 | 16 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_homopolymer_gt10 | homalt | 97.6744 | 100.0000 | 95.4545 | 99.9595 | 21 | 0 | 21 | 1 | 0 | 0.0000 | |
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_quadTR_11to50 | hetalt | 98.3128 | 96.6816 | 100.0000 | 40.4561 | 2593 | 89 | 2611 | 0 | 0 | ||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_quadTR_gt200 | * | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_quadTR_gt200 | het | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_quadTR_gt200 | hetalt | 0.0000 | 0.0000 | 0.0000 | 0 | 0 | 0 | 0 | 0 | |||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_quadTR_gt200 | homalt | 0.0000 | 100.0000 | 0 | 0 | 0 | 0 | 0 | ||||
raldana-dualsentieon | INDEL | * | lowcmp_SimpleRepeat_triTR_11to50 | hetalt | 99.0153 | 98.0498 | 100.0000 | 28.0159 | 905 | 18 | 907 | 0 | 0 |