update gte-modernbert-base (#111)
afalf authored Feb 7, 2025
1 parent 777d539 commit e70c872
Showing 78 changed files with 9,135 additions and 0 deletions.
@@ -0,0 +1,95 @@
{
  "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
  "task_name": "AmazonCounterfactualClassification",
  "mteb_version": "1.29.7",
  "scores": {
    "test": [
      {
        "accuracy": 0.774776,
        "f1": 0.712683,
        "f1_weighted": 0.793739,
        "ap": 0.404116,
        "ap_weighted": 0.404116,
        "scores_per_experiment": [
          {
            "accuracy": 0.786567,
            "f1": 0.717657,
            "f1_weighted": 0.802598,
            "ap": 0.40172,
            "ap_weighted": 0.40172
          },
          {
            "accuracy": 0.832836,
            "f1": 0.772042,
            "f1_weighted": 0.843729,
            "ap": 0.481723,
            "ap_weighted": 0.481723
          },
          {
            "accuracy": 0.740299,
            "f1": 0.679405,
            "f1_weighted": 0.764489,
            "ap": 0.364821,
            "ap_weighted": 0.364821
          },
          {
            "accuracy": 0.786567,
            "f1": 0.716548,
            "f1_weighted": 0.802337,
            "ap": 0.399409,
            "ap_weighted": 0.399409
          },
          {
            "accuracy": 0.791045,
            "f1": 0.729293,
            "f1_weighted": 0.808027,
            "ap": 0.4231,
            "ap_weighted": 0.4231
          },
          {
            "accuracy": 0.759701,
            "f1": 0.702879,
            "f1_weighted": 0.782004,
            "ap": 0.396272,
            "ap_weighted": 0.396272
          },
          {
            "accuracy": 0.81194,
            "f1": 0.749751,
            "f1_weighted": 0.825719,
            "ap": 0.449344,
            "ap_weighted": 0.449344
          },
          {
            "accuracy": 0.777612,
            "f1": 0.70581,
            "f1_weighted": 0.794315,
            "ap": 0.384948,
            "ap_weighted": 0.384948
          },
          {
            "accuracy": 0.738806,
            "f1": 0.686808,
            "f1_weighted": 0.764519,
            "ap": 0.383435,
            "ap_weighted": 0.383435
          },
          {
            "accuracy": 0.722388,
            "f1": 0.666635,
            "f1_weighted": 0.749654,
            "ap": 0.356392,
            "ap_weighted": 0.356392
          }
        ],
        "main_score": 0.774776,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 7.348646640777588,
  "kg_co2_emissions": null
}
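
Each file in this commit is the per-task JSON that mteb writes when an evaluation finishes. A minimal sketch of how a file like the one above can be produced, assuming the mteb 1.x Python API and the Alibaba-NLP/gte-modernbert-base checkpoint on the Hugging Face Hub (not necessarily the exact invocation behind this commit):

# Minimal sketch; model identifier assumed from the commit title.
import mteb

# Resolve the model via the Hugging Face Hub.
model = mteb.get_model("Alibaba-NLP/gte-modernbert-base")

# One task from this commit; mteb writes one JSON per task.
tasks = mteb.get_tasks(tasks=["AmazonCounterfactualClassification"])
evaluation = mteb.MTEB(tasks=tasks)
evaluation.run(model, output_folder="results")
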
@@ -0,0 +1,95 @@
{
  "dataset_revision": "e2d317d38cd51312af73b3d32a06d1a08b442046",
  "task_name": "AmazonPolarityClassification",
  "mteb_version": "1.29.7",
  "scores": {
    "test": [
      {
        "accuracy": 0.931718,
        "f1": 0.931546,
        "f1_weighted": 0.931546,
        "ap": 0.899049,
        "ap_weighted": 0.899049,
        "scores_per_experiment": [
          {
            "accuracy": 0.941712,
            "f1": 0.941674,
            "f1_weighted": 0.941674,
            "ap": 0.906392,
            "ap_weighted": 0.906392
          },
          {
            "accuracy": 0.937562,
            "f1": 0.937526,
            "f1_weighted": 0.937526,
            "ap": 0.901397,
            "ap_weighted": 0.901397
          },
          {
            "accuracy": 0.943928,
            "f1": 0.943917,
            "f1_weighted": 0.943917,
            "ap": 0.924645,
            "ap_weighted": 0.924645
          },
          {
            "accuracy": 0.914325,
            "f1": 0.914016,
            "f1_weighted": 0.914016,
            "ap": 0.902212,
            "ap_weighted": 0.902212
          },
          {
            "accuracy": 0.930848,
            "f1": 0.930693,
            "f1_weighted": 0.930693,
            "ap": 0.9204,
            "ap_weighted": 0.9204
          },
          {
            "accuracy": 0.920752,
            "f1": 0.920531,
            "f1_weighted": 0.920531,
            "ap": 0.870495,
            "ap_weighted": 0.870495
          },
          {
            "accuracy": 0.941295,
            "f1": 0.941281,
            "f1_weighted": 0.941281,
            "ap": 0.909586,
            "ap_weighted": 0.909586
          },
          {
            "accuracy": 0.947608,
            "f1": 0.947583,
            "f1_weighted": 0.947583,
            "ap": 0.915868,
            "ap_weighted": 0.915868
          },
          {
            "accuracy": 0.940945,
            "f1": 0.940917,
            "f1_weighted": 0.940917,
            "ap": 0.90673,
            "ap_weighted": 0.90673
          },
          {
            "accuracy": 0.89821,
            "f1": 0.897318,
            "f1_weighted": 0.897318,
            "ap": 0.832765,
            "ap_weighted": 0.832765
          }
        ],
        "main_score": 0.931718,
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 130.29326963424683,
  "kg_co2_emissions": null
}
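
The aggregate fields in these files are consistent with plain means over the ten scores_per_experiment entries; a quick check in plain Python (the file name is assumed for illustration):

import json
from statistics import mean

# File name assumed; any result file in this commit works the same way.
with open("AmazonPolarityClassification.json") as f:
    result = json.load(f)

split = result["scores"]["test"][0]
accuracies = [run["accuracy"] for run in split["scores_per_experiment"]]
# Mean of the ten runs is ~0.931718, matching main_score above.
print(mean(accuracies), split["main_score"])
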
@@ -0,0 +1,73 @@
{
  "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
  "task_name": "AmazonReviewsClassification",
  "mteb_version": "1.29.7",
  "scores": {
    "test": [
      {
        "accuracy": 0.53572,
        "f1": 0.530963,
        "f1_weighted": 0.530963,
        "scores_per_experiment": [
          {
            "accuracy": 0.5258,
            "f1": 0.519506,
            "f1_weighted": 0.519506
          },
          {
            "accuracy": 0.541,
            "f1": 0.538943,
            "f1_weighted": 0.538943
          },
          {
            "accuracy": 0.5242,
            "f1": 0.525775,
            "f1_weighted": 0.525775
          },
          {
            "accuracy": 0.5452,
            "f1": 0.538755,
            "f1_weighted": 0.538755
          },
          {
            "accuracy": 0.5428,
            "f1": 0.527215,
            "f1_weighted": 0.527215
          },
          {
            "accuracy": 0.5346,
            "f1": 0.53011,
            "f1_weighted": 0.53011
          },
          {
            "accuracy": 0.523,
            "f1": 0.520411,
            "f1_weighted": 0.520411
          },
          {
            "accuracy": 0.5618,
            "f1": 0.565189,
            "f1_weighted": 0.565189
          },
          {
            "accuracy": 0.5332,
            "f1": 0.530218,
            "f1_weighted": 0.530218
          },
          {
            "accuracy": 0.5256,
            "f1": 0.513505,
            "f1_weighted": 0.513505
          }
        ],
        "main_score": 0.53572,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 25.778754234313965,
  "kg_co2_emissions": null
}
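
With 78 result files of this shape in the commit, the headline numbers can be tabulated with a short loop (the results directory layout is assumed for illustration):

import json
from pathlib import Path

# Path assumed; point this at the folder holding the JSON files above.
for path in sorted(Path("results").rglob("*.json")):
    data = json.loads(path.read_text())
    if "scores" not in data:  # skip non-result files such as model metadata
        continue
    for split_name, entries in data["scores"].items():
        for entry in entries:
            print(data["task_name"], split_name, entry["hf_subset"], entry["main_score"])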