| Model_name (string, 16 classes) | Train_size (int64, 50.8k) | Test_size (int64, 12.7k) | arg (dict) | lora (list, lengths 1–9) | Parameters (int64, 110M–1.85B) | Trainable_parameters (int64, 9.27k–1.11B) | r (int64, 4–1.02k) | Memory Allocation (string) | Training Time (string) | Performance (dict) |
|---|---|---|---|---|---|---|---|---|---|---|
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 223,391,258 | 742,413 | 8 | 2587.94 | 3250.57 | {
"accuracy": 0.8722731583939298,
"f1_macro": 0.862150429106117,
"f1_weighted": 0.8723422017705071,
"precision": 0.8658039080136887,
"recall": 0.8593864161213076
} |
facebook/opt-350m | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"fc1",
"fc2",
"k_proj",
"out_proj",
"project_in",
"project_out",
"q_proj",
"score",
"v_proj"
] | 345,463,808 | 14,260,736 | 32 | 3288.99 | 1654.49 | {
"accuracy": 0.8845241858994626,
"f1_macro": 0.8791726339899201,
"f1_weighted": 0.8846846511262262,
"precision": 0.8798342880590895,
"recall": 0.8787539901529715
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 568,044,570 | 8,140,813 | 16 | 5030.66 | 2209.58 | {
"accuracy": 0.889661713563073,
"f1_macro": 0.8848712853323799,
"f1_weighted": 0.8898577947681364,
"precision": 0.8870786826880624,
"recall": 0.8830419063481568
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 370,591,770 | 15,218,701 | 32 | 3508.76 | 1661.64 | {
"accuracy": 0.89116345241859,
"f1_macro": 0.885951353706553,
"f1_weighted": 0.8914312375722376,
"precision": 0.886857455081974,
"recall": 0.8853286180328447
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 422,033,434 | 15,807,501 | 32 | 3792.82 | 1905.48 | {
"accuracy": 0.8889503635788808,
"f1_macro": 0.8817753174512746,
"f1_weighted": 0.8891663020744098,
"precision": 0.8831525856714956,
"recall": 0.8807908131673957
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 756,081,885 | 17,350,864 | 16 | 2361.3 | 2706.95 | {
"accuracy": 0.82042364843503,
"f1_macro": 0.7833049300677861,
"f1_weighted": 0.8144741005682286,
"precision": 0.8180956093532021,
"recall": 0.7822013600956367
} |
Qwen/Qwen3-Reranker-0.6B | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"down_proj",
"gate_proj",
"k_proj",
"o_proj",
"q_proj",
"score",
"up_proj",
"v_proj"
] | 636,173,312 | 40,383,488 | 64 | 4360.82 | 1491.26 | {
"accuracy": 0.8875276636104964,
"f1_macro": 0.881302318996792,
"f1_weighted": 0.8876169297855617,
"precision": 0.8833784383227032,
"recall": 0.879686542527803
} |
facebook/opt-350m | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"fc1",
"fc2",
"k_proj",
"out_proj",
"project_in",
"project_out",
"q_proj",
"score",
"v_proj"
] | 359,717,888 | 28,514,816 | 64 | 3501.25 | 1725.59 | {
"accuracy": 0.8891084413531457,
"f1_macro": 0.884251353876662,
"f1_weighted": 0.8893208862840672,
"precision": 0.8852089687922547,
"recall": 0.8835820879208923
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 384,747,546 | 29,374,477 | 64 | 3717.72 | 1734.12 | {
"accuracy": 0.8947992412266835,
"f1_macro": 0.8899736041154382,
"f1_weighted": 0.8950306912490653,
"precision": 0.8906489322663366,
"recall": 0.8895635950169705
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 575,122,458 | 15,218,701 | 32 | 5144.48 | 2235.28 | {
"accuracy": 0.8925071134998419,
"f1_macro": 0.8879024011502923,
"f1_weighted": 0.8926429337051776,
"precision": 0.8897393465596863,
"recall": 0.8863225423457599
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 437,827,610 | 31,601,677 | 64 | 4058.01 | 1973.96 | {
"accuracy": 0.8944040467910211,
"f1_macro": 0.888322020211215,
"f1_weighted": 0.8946660494482694,
"precision": 0.8889590332743614,
"recall": 0.8879002544876452
} |
Qwen/Qwen3-Reranker-0.6B | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"down_proj",
"gate_proj",
"k_proj",
"o_proj",
"q_proj",
"score",
"up_proj",
"v_proj"
] | 676,543,488 | 80,753,664 | 128 | 5036.09 | 1592.53 | {
"accuracy": 0.8925861523869744,
"f1_macro": 0.8868377941363138,
"f1_weighted": 0.8926828164233866,
"precision": 0.8886165937671825,
"recall": 0.885368684957202
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 224,080,410 | 1,431,565 | 16 | 2593.0 | 3261.23 | {
"accuracy": 0.8821530192854885,
"f1_macro": 0.874794659895967,
"f1_weighted": 0.8823534689722649,
"precision": 0.8770079907164144,
"recall": 0.8731264865543866
} |
facebook/opt-350m | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"fc1",
"fc2",
"k_proj",
"out_proj",
"project_in",
"project_out",
"q_proj",
"score",
"v_proj"
] | 388,226,048 | 57,022,976 | 128 | 3932.16 | 1872.85 | {
"accuracy": 0.8920328801770471,
"f1_macro": 0.8868732981442506,
"f1_weighted": 0.8922332162153938,
"precision": 0.8877692584168926,
"recall": 0.8862711257051747
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 773,432,749 | 34,701,728 | 32 | 2475.04 | 2700.75 | {
"accuracy": 0.8593107809042049,
"f1_macro": 0.8440172091473165,
"f1_weighted": 0.8585287294204809,
"precision": 0.8532288071809823,
"recall": 0.8396354174184749
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 413,059,098 | 57,686,029 | 128 | 4149.31 | 1883.44 | {
"accuracy": 0.8980398355991147,
"f1_macro": 0.8933785290970504,
"f1_weighted": 0.8982180255485378,
"precision": 0.8943152546471684,
"recall": 0.8926576739287886
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 589,278,234 | 29,374,477 | 64 | 5353.26 | 2310.3 | {
"accuracy": 0.8963009800822005,
"f1_macro": 0.8929057205031824,
"f1_weighted": 0.8964683685160553,
"precision": 0.8940441682799969,
"recall": 0.8919755114442118
} |
Qwen/Qwen3-Reranker-0.6B | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"down_proj",
"gate_proj",
"k_proj",
"o_proj",
"q_proj",
"score",
"up_proj",
"v_proj"
] | 757,283,840 | 161,494,016 | 256 | 6152.73 | 1764.92 | {
"accuracy": 0.8938507745810939,
"f1_macro": 0.8888681143587468,
"f1_weighted": 0.8940071272374931,
"precision": 0.8908125929555937,
"recall": 0.8872898702169905
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 469,415,962 | 63,190,029 | 128 | 4575.04 | 2116.9 | {
"accuracy": 0.9004110022130888,
"f1_macro": 0.8957841401977072,
"f1_weighted": 0.9005870758571973,
"precision": 0.8966880236436467,
"recall": 0.8950678952185568
} |
facebook/opt-350m | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"fc1",
"fc2",
"k_proj",
"out_proj",
"project_in",
"project_out",
"q_proj",
"score",
"v_proj"
] | 445,242,368 | 114,039,296 | 256 | 4697.46 | 2132.45 | {
"accuracy": 0.8953525134366108,
"f1_macro": 0.8910942257195085,
"f1_weighted": 0.8955565005279739,
"precision": 0.8922545500690322,
"recall": 0.8901897326636318
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 469,682,202 | 114,309,133 | 256 | 4939.87 | 2136.19 | {
"accuracy": 0.8983559911476446,
"f1_macro": 0.8934100039534362,
"f1_weighted": 0.8985384020635359,
"precision": 0.8942080932451125,
"recall": 0.892813089993333
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 808,134,477 | 69,403,456 | 64 | 2703.21 | 2738.66 | {
"accuracy": 0.8797818526715144,
"f1_macro": 0.8699565041745851,
"f1_weighted": 0.8800662980988457,
"precision": 0.8729282824509633,
"recall": 0.8679609354136181
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 225,458,714 | 2,809,869 | 32 | 2604.14 | 3278.95 | {
"accuracy": 0.8866582358520392,
"f1_macro": 0.8805361394786488,
"f1_weighted": 0.8868472161794123,
"precision": 0.8821746780557068,
"recall": 0.8792168958629811
} |
Qwen/Qwen3-Reranker-0.6B | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"down_proj",
"gate_proj",
"k_proj",
"o_proj",
"q_proj",
"score",
"up_proj",
"v_proj"
] | 918,764,544 | 322,974,720 | 512 | 8024.28 | 2170.38 | {
"accuracy": 0.8966961745178628,
"f1_macro": 0.8918835728777867,
"f1_weighted": 0.896871052710265,
"precision": 0.8933411271456468,
"recall": 0.8907348982342026
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 617,589,786 | 57,686,029 | 128 | 5781.26 | 2466.76 | {
"accuracy": 0.8970123300663927,
"f1_macro": 0.8932253980605369,
"f1_weighted": 0.8972005164623214,
"precision": 0.8944153044334617,
"recall": 0.8922802938128591
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 532,592,666 | 126,366,733 | 256 | 5391.17 | 2386.94 | {
"accuracy": 0.9015965855200759,
"f1_macro": 0.8973526640185696,
"f1_weighted": 0.9018252959007775,
"precision": 0.8984521946549535,
"recall": 0.8965587386239398
} |
facebook/opt-350m | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"fc1",
"fc2",
"k_proj",
"out_proj",
"project_in",
"project_out",
"q_proj",
"score",
"v_proj"
] | 559,275,008 | 228,071,936 | 512 | 6303.52 | 2681.3 | {
"accuracy": 0.896063863420803,
"f1_macro": 0.8914847709874795,
"f1_weighted": 0.8962464034480218,
"precision": 0.8925647716667204,
"recall": 0.8907047374890307
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 582,928,410 | 227,555,341 | 512 | 6516.59 | 2688.97 | {
"accuracy": 0.902466013278533,
"f1_macro": 0.8982951782674592,
"f1_weighted": 0.9026722067520413,
"precision": 0.898378896740835,
"recall": 0.8984416803597521
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 877,537,933 | 138,806,912 | 128 | 3194.29 | 2737.1 | {
"accuracy": 0.8878438191590262,
"f1_macro": 0.8807570808340444,
"f1_weighted": 0.8882097376153891,
"precision": 0.8827566138741335,
"recall": 0.8794871258073358
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 674,212,890 | 114,309,133 | 256 | 6556.64 | 2727.88 | {
"accuracy": 0.8993044577932343,
"f1_macro": 0.8953449229210466,
"f1_weighted": 0.8996268958775625,
"precision": 0.8959634381136734,
"recall": 0.8951316615673002
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 658,946,074 | 252,720,141 | 512 | 7180.6 | 2927.03 | {
"accuracy": 0.9049162187796396,
"f1_macro": 0.901107386190174,
"f1_weighted": 0.9051770409666084,
"precision": 0.901329006868039,
"recall": 0.9011179531520016
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 228,215,322 | 5,566,477 | 64 | 2627.37 | 3338.35 | {
"accuracy": 0.8893455580145432,
"f1_macro": 0.8831911868614246,
"f1_weighted": 0.8895443767065669,
"precision": 0.8843768003085015,
"recall": 0.8822588568756525
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 1,016,344,845 | 277,613,824 | 256 | 4094.39 | 2784.5 | {
"accuracy": 0.8972494467277901,
"f1_macro": 0.8914586129914629,
"f1_weighted": 0.8975192403879136,
"precision": 0.8923177953104979,
"recall": 0.8909761994005975
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 787,459,098 | 227,555,341 | 512 | 8153.74 | 3294.49 | {
"accuracy": 0.9019127410686057,
"f1_macro": 0.8987907740359878,
"f1_weighted": 0.9020500820267734,
"precision": 0.8993803181920761,
"recall": 0.8983952376220622
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 233,728,538 | 11,079,693 | 128 | 2669.5 | 3444.4 | {
"accuracy": 0.8938507745810939,
"f1_macro": 0.888144143643366,
"f1_weighted": 0.8940975232304162,
"precision": 0.8890617363540988,
"recall": 0.8875081136269234
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 1,293,958,669 | 555,227,648 | 512 | 6137.83 | 2885.91 | {
"accuracy": 0.9001738855516914,
"f1_macro": 0.894299046872226,
"f1_weighted": 0.900442163738966,
"precision": 0.8947516066386669,
"recall": 0.8941409421046498
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 244,754,970 | 22,106,125 | 256 | 2765.5 | 3658.44 | {
"accuracy": 0.896380018969333,
"f1_macro": 0.8904405976694844,
"f1_weighted": 0.8965934269190462,
"precision": 0.8909890016674086,
"recall": 0.8901285257305411
} |
google-t5/t5-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"k",
"o",
"out_proj",
"q",
"v",
"wi",
"wo"
] | 1,849,186,317 | 1,110,455,296 | 1,024 | 9704.44 | 3808.17 | {
"accuracy": 0.9046000632311098,
"f1_macro": 0.8996896264023401,
"f1_weighted": 0.9048083267959188,
"precision": 0.9001987207094116,
"recall": 0.8994839753259615
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"key",
"pooler",
"query",
"value"
] | 266,807,834 | 44,158,989 | 512 | 2948.18 | 4128.85 | {
"accuracy": 0.8989883022447044,
"f1_macro": 0.8932787242488319,
"f1_weighted": 0.899180989072891,
"precision": 0.8939046353162913,
"recall": 0.8929198305459212
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 125,919,770 | 1,264,141 | 4 | 1391.99 | 649.89 | {
"accuracy": 0.8484824533670566,
"f1_macro": 0.8358528600947209,
"f1_weighted": 0.8484053097256183,
"precision": 0.8414743158781716,
"recall": 0.8322497328689692
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 110,171,930 | 679,693 | 4 | 1260.55 | 604.22 | {
"accuracy": 0.6776003793866582,
"f1_macro": 0.557677468110299,
"f1_weighted": 0.6360221330998432,
"precision": 0.5554900480313727,
"recall": 0.5934898807250314
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 126,583,322 | 1,927,693 | 8 | 1392.71 | 647.49 | {
"accuracy": 0.862867530825166,
"f1_macro": 0.8530823288347613,
"f1_weighted": 0.8629893081564594,
"precision": 0.8579622374628227,
"recall": 0.8495085850791969
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 110,841,626 | 1,349,389 | 8 | 1260.41 | 604.98 | {
"accuracy": 0.7556907998735378,
"f1_macro": 0.6913816116702359,
"f1_weighted": 0.7370133650405578,
"precision": 0.7621957139881789,
"recall": 0.6991076757647363
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 127,910,426 | 3,254,797 | 16 | 1402.33 | 659.61 | {
"accuracy": 0.8700600695542207,
"f1_macro": 0.8617076949119702,
"f1_weighted": 0.8702660794549991,
"precision": 0.8650290907522602,
"recall": 0.8590312471174008
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 409,529,409 | 2,175,028 | 4 | 3931.55 | 1994.23 | {
"accuracy": 0.837100853619981,
"f1_macro": 0.819320002150968,
"f1_weighted": 0.8365214467882119,
"precision": 0.829413114853224,
"recall": 0.8146486251583185
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 112,181,018 | 2,688,781 | 16 | 1271.48 | 614.52 | {
"accuracy": 0.8084097375908947,
"f1_macro": 0.7752610196307308,
"f1_weighted": 0.8023462795387738,
"precision": 0.8061572719439902,
"recall": 0.7713207183347648
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 336,946,202 | 1,790,989 | 4 | 3144.02 | 1682.06 | {
"accuracy": 0.7811413215301929,
"f1_macro": 0.7194840912752345,
"f1_weighted": 0.7656902509079089,
"precision": 0.7873359402685902,
"recall": 0.7243830682157009
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 130,564,634 | 5,909,005 | 32 | 1455.37 | 663.66 | {
"accuracy": 0.8754347138792286,
"f1_macro": 0.8678486993252565,
"f1_weighted": 0.8756572384400854,
"precision": 0.8698494102676955,
"recall": 0.8662188276261766
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 408,213,530 | 1,987,597 | 4 | 3615.83 | 1929.23 | {
"accuracy": 0.8498261144483086,
"f1_macro": 0.8320999499486269,
"f1_weighted": 0.8489416106176354,
"precision": 0.8392730685236397,
"recall": 0.8282218416505667
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 114,859,802 | 5,367,565 | 32 | 1324.87 | 617.48 | {
"accuracy": 0.8453999367688902,
"f1_macro": 0.832204269972286,
"f1_weighted": 0.8452577560841472,
"precision": 0.8415721920329958,
"recall": 0.8270353900732399
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 135,873,050 | 11,217,421 | 64 | 1516.47 | 676.66 | {
"accuracy": 0.8810464748656339,
"f1_macro": 0.8736485406190957,
"f1_weighted": 0.8812054216381876,
"precision": 0.8750473261159748,
"recall": 0.8724947273133411
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 120,217,370 | 10,725,133 | 64 | 1386.91 | 630.21 | {
"accuracy": 0.8607334808725893,
"f1_macro": 0.8507968201349172,
"f1_weighted": 0.8608113855611761,
"precision": 0.8563174244998447,
"recall": 0.8472312217095838
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 338,723,866 | 3,568,653 | 8 | 3164.27 | 1685.52 | {
"accuracy": 0.8390768257982928,
"f1_macro": 0.8215203388033808,
"f1_weighted": 0.8378995988408484,
"precision": 0.8353637674216158,
"recall": 0.8142267320449833
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 146,489,882 | 21,834,253 | 128 | 1560.5 | 702.94 | {
"accuracy": 0.8858678469807145,
"f1_macro": 0.8792843892984809,
"f1_weighted": 0.8859717818352927,
"precision": 0.8805172243986237,
"recall": 0.878249432241028
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 130,932,506 | 21,440,269 | 128 | 1437.65 | 655.95 | {
"accuracy": 0.8727473917167247,
"f1_macro": 0.8657257994511084,
"f1_weighted": 0.8728823790503637,
"precision": 0.8683363006590009,
"recall": 0.863745947792565
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 411,704,437 | 4,350,056 | 8 | 3959.44 | 1961.57 | {
"accuracy": 0.8593107809042049,
"f1_macro": 0.8451630147794547,
"f1_weighted": 0.8592830424157094,
"precision": 0.8497632268604688,
"recall": 0.8425814401047079
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 410,187,802 | 3,961,869 | 8 | 3634.97 | 1931.87 | {
"accuracy": 0.8739329750237117,
"f1_macro": 0.8630749222424812,
"f1_weighted": 0.8739608067544079,
"precision": 0.8660347921507289,
"recall": 0.8612344922001133
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 167,723,546 | 43,067,917 | 256 | 1870.04 | 790.67 | {
"accuracy": 0.8887132469174834,
"f1_macro": 0.8823514130946098,
"f1_weighted": 0.8887802619711761,
"precision": 0.8838996386590717,
"recall": 0.880989032450131
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 152,362,778 | 42,870,541 | 256 | 1746.14 | 738.67 | {
"accuracy": 0.8804141637685742,
"f1_macro": 0.8745999258330277,
"f1_weighted": 0.8805422577690016,
"precision": 0.8765720714268965,
"recall": 0.8731069954938984
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 342,279,194 | 7,123,981 | 16 | 3213.34 | 1700.35 | {
"accuracy": 0.8574928865001581,
"f1_macro": 0.846890119031086,
"f1_weighted": 0.85781495973405,
"precision": 0.8518450581081903,
"recall": 0.8434738422594921
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"key",
"out_proj",
"query",
"value"
] | 210,190,874 | 85,535,245 | 512 | 2378.39 | 962.47 | {
"accuracy": 0.8916376857413848,
"f1_macro": 0.8857860565959809,
"f1_weighted": 0.8917251879556186,
"precision": 0.8869669092700435,
"recall": 0.8847594241994637
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 195,223,322 | 85,731,085 | 512 | 2274.07 | 911.63 | {
"accuracy": 0.88444514701233,
"f1_macro": 0.8791416361373022,
"f1_weighted": 0.8845230747528955,
"precision": 0.8807529967164646,
"recall": 0.8778948114704137
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 416,054,493 | 8,700,112 | 16 | 4012.11 | 1986.69 | {
"accuracy": 0.8701391084413531,
"f1_macro": 0.8590196347959178,
"f1_weighted": 0.8702395584157556,
"precision": 0.8621420732579376,
"recall": 0.857157647104303
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 414,136,346 | 7,910,413 | 16 | 3683.47 | 1944.31 | {
"accuracy": 0.8834966803667405,
"f1_macro": 0.8752151288172615,
"f1_weighted": 0.8836837903837133,
"precision": 0.8774345344074861,
"recall": 0.8735503023918401
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 349,389,850 | 14,234,637 | 32 | 3328.52 | 1726.62 | {
"accuracy": 0.8693487195700285,
"f1_macro": 0.8615475713243064,
"f1_weighted": 0.8698061074222874,
"precision": 0.864004754302695,
"recall": 0.8599117843638718
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 424,754,605 | 17,400,224 | 32 | 4142.27 | 2025.66 | {
"accuracy": 0.8789914638001897,
"f1_macro": 0.8705082136246272,
"f1_weighted": 0.8792583179592576,
"precision": 0.8734569164370962,
"recall": 0.8685444688611441
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 422,033,434 | 15,807,501 | 32 | 3792.82 | 1973.37 | {
"accuracy": 0.8881599747075561,
"f1_macro": 0.880613244415908,
"f1_weighted": 0.8883373228402671,
"precision": 0.8825283842549392,
"recall": 0.8792305171377859
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 363,611,162 | 28,455,949 | 64 | 3536.97 | 1803.29 | {
"accuracy": 0.8771735693961429,
"f1_macro": 0.8699330967814549,
"f1_weighted": 0.8774191862467641,
"precision": 0.8714080281561766,
"recall": 0.8688668759782217
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 442,154,829 | 34,800,448 | 64 | 4405.16 | 2105.68 | {
"accuracy": 0.8857097692064496,
"f1_macro": 0.8789842826741208,
"f1_weighted": 0.8860069708455789,
"precision": 0.8800323440010615,
"recall": 0.8783776772722253
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 437,827,610 | 31,601,677 | 64 | 4058.01 | 2047.57 | {
"accuracy": 0.8944830856781536,
"f1_macro": 0.8888925420693033,
"f1_weighted": 0.8946779369114773,
"precision": 0.8899960829353635,
"recall": 0.8880635347919621
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 392,053,786 | 56,898,573 | 128 | 3975.7 | 1952.99 | {
"accuracy": 0.8834966803667405,
"f1_macro": 0.8783752303231847,
"f1_weighted": 0.88368914760437,
"precision": 0.8794481315532393,
"recall": 0.8776567718517531
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 476,955,277 | 69,600,896 | 128 | 4967.39 | 2298.46 | {
"accuracy": 0.8925071134998419,
"f1_macro": 0.8874563535314081,
"f1_weighted": 0.8927586350868724,
"precision": 0.8879377177918474,
"recall": 0.8873768776967422
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 469,415,962 | 63,190,029 | 128 | 4573.34 | 2199.88 | {
"accuracy": 0.9004900411002214,
"f1_macro": 0.8957679639409909,
"f1_weighted": 0.9006955885096265,
"precision": 0.8967474874901942,
"recall": 0.8950164503701811
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 448,939,034 | 113,783,821 | 256 | 4750.65 | 2213.83 | {
"accuracy": 0.8874486247233639,
"f1_macro": 0.8823049883630817,
"f1_weighted": 0.8876614939241605,
"precision": 0.8829447914365554,
"recall": 0.8819320015084025
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 546,556,173 | 139,201,792 | 256 | 5891.24 | 2603.79 | {
"accuracy": 0.8970123300663927,
"f1_macro": 0.8923001378224603,
"f1_weighted": 0.8971946194525198,
"precision": 0.8927613365832312,
"recall": 0.8921285652029669
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 532,592,666 | 126,366,733 | 256 | 5391.17 | 2487.22 | {
"accuracy": 0.9027031299399304,
"f1_macro": 0.8985249989118346,
"f1_weighted": 0.9029153635967133,
"precision": 0.8995863769514236,
"recall": 0.8977173916793645
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"key",
"query",
"value"
] | 562,709,530 | 227,554,317 | 512 | 6351.86 | 2787.0 | {
"accuracy": 0.8914005690799873,
"f1_macro": 0.8864933880817142,
"f1_weighted": 0.8915764179384021,
"precision": 0.8871029944503552,
"recall": 0.8861094670610424
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"classifier",
"dense",
"in_proj",
"pos_proj",
"pos_q_proj"
] | 658,946,074 | 252,720,141 | 512 | 7180.6 | 3068.94 | {
"accuracy": 0.9066550742965539,
"f1_macro": 0.9027440214865444,
"f1_weighted": 0.9068737680350811,
"precision": 0.9032737095991187,
"recall": 0.9024190025341322
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"k_proj",
"out_proj",
"q_proj",
"v_proj"
] | 685,757,965 | 278,403,584 | 512 | 7867.37 | 3278.36 | {
"accuracy": 0.9001738855516914,
"f1_macro": 0.8959184453049027,
"f1_weighted": 0.9003404184427293,
"precision": 0.8963869391852941,
"recall": 0.8956855215198263
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 408,646,721 | 1,290,292 | 4 | 953.04 | 781.26 | {
"accuracy": 0.845004742333228,
"f1_macro": 0.8291413894360475,
"f1_weighted": 0.8448352931372985,
"precision": 0.8350869160708587,
"recall": 0.8256532089018532
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"out_proj"
] | 223,513,409 | 9,268 | 4 | 1828.27 | 794.45 | {
"accuracy": 0.12622510275055326,
"f1_macro": 0.061089496032435624,
"f1_weighted": 0.07495071441669991,
"precision": 0.08000500651141326,
"recall": 0.10210739144307183
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 409,937,013 | 2,580,584 | 8 | 944.26 | 789.93 | {
"accuracy": 0.8638950363578881,
"f1_macro": 0.8525358873374469,
"f1_weighted": 0.8642250368019567,
"precision": 0.8567960191676632,
"recall": 0.8499438528166792
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 412,517,597 | 5,161,168 | 16 | 960.83 | 789.47 | {
"accuracy": 0.8725893139424596,
"f1_macro": 0.8632568652072942,
"f1_weighted": 0.8728727834211805,
"precision": 0.8662148817287414,
"recall": 0.8611576784632347
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k",
"o",
"q",
"v",
"wi_0",
"wi_1",
"wo"
] | 225,209,153 | 1,705,012 | 4 | 2649.39 | 1710.46 | {
"accuracy": 0.19680682895984825,
"f1_macro": 0.09519672524418793,
"f1_weighted": 0.11695216801550407,
"precision": 0.11552054112946236,
"recall": 0.15800505790358427
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 417,678,765 | 10,322,336 | 32 | 993.96 | 786.86 | {
"accuracy": 0.8827853303825483,
"f1_macro": 0.8756500209958136,
"f1_weighted": 0.8831353714515359,
"precision": 0.8771788767815625,
"recall": 0.8745545422664613
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 428,001,101 | 20,644,672 | 64 | 1060.35 | 794.72 | {
"accuracy": 0.8885551691432184,
"f1_macro": 0.882663097395477,
"f1_weighted": 0.8888437665388873,
"precision": 0.8832717599594526,
"recall": 0.8823910207284164
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"wi_0",
"dense",
"v",
"wi_1",
"k",
"out_proj",
"q",
"wo",
"o"
] | 225,209,153 | 1,705,012 | 4 | 2640.87 | 1719.28 | {
"accuracy": 0.19008852355358838,
"f1_macro": 0.09162888412305616,
"f1_weighted": 0.11225337817161031,
"precision": 0.10799035490498364,
"recall": 0.15256261921842296
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 448,645,773 | 41,289,344 | 128 | 1191.16 | 798.64 | {
"accuracy": 0.8938507745810939,
"f1_macro": 0.8886604857922034,
"f1_weighted": 0.8940994763136924,
"precision": 0.8894674968455903,
"recall": 0.8881550093984057
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 489,935,117 | 82,578,688 | 256 | 1475.45 | 847.16 | {
"accuracy": 0.8988302244704395,
"f1_macro": 0.8937572884194912,
"f1_weighted": 0.8989465228549908,
"precision": 0.8946194192964826,
"recall": 0.8930720068510613
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"out_proj"
] | 225,218,421 | 1,714,280 | 8 | 2640.98 | 1715.72 | {
"accuracy": 0.5288491938033513,
"f1_macro": 0.4391417107422545,
"f1_weighted": 0.49398884315574265,
"precision": 0.48366043598203423,
"recall": 0.4656912252805828
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 490,819,853 | 83,463,424 | 4 | 1682.77 | 1239.16 | {
"accuracy": 0.9018337021814733,
"f1_macro": 0.8973518270033878,
"f1_weighted": 0.901926763806326,
"precision": 0.8983431633289526,
"recall": 0.8964917421940851
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k",
"o",
"q",
"v",
"wi_0",
"wi_1",
"wo"
] | 226,914,165 | 3,410,024 | 8 | 2684.9 | 1725.93 | {
"accuracy": 0.5288491938033513,
"f1_macro": 0.4370755285845007,
"f1_weighted": 0.4946075324705599,
"precision": 0.49482949226226025,
"recall": 0.4584299106614047
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 491,704,589 | 84,348,160 | 8 | 1688.75 | 1255.55 | {
"accuracy": 0.9037306354726525,
"f1_macro": 0.8994046606341128,
"f1_weighted": 0.903815252990923,
"precision": 0.9003330113724781,
"recall": 0.8986246033127124
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 493,474,061 | 86,117,632 | 16 | 1700.69 | 1253.9 | {
"accuracy": 0.901280429971546,
"f1_macro": 0.8969377414857906,
"f1_weighted": 0.9013812193718886,
"precision": 0.8981345585782979,
"recall": 0.8959601687765681
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 497,013,005 | 89,656,576 | 32 | 1724.58 | 1250.48 | {
"accuracy": 0.8977236800505849,
"f1_macro": 0.8929478798534165,
"f1_weighted": 0.8978605616617547,
"precision": 0.8933585360821303,
"recall": 0.892692971979935
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"dense",
"out_proj"
] | 226,932,701 | 3,428,560 | 16 | 2686.26 | 1722.61 | {
"accuracy": 0.7421751501738856,
"f1_macro": 0.6502244728766655,
"f1_weighted": 0.7159253112324739,
"precision": 0.7405507369342138,
"recall": 0.6782645772149173
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 504,090,893 | 96,734,464 | 64 | 1772.57 | 1257.79 | {
"accuracy": 0.8964590578564654,
"f1_macro": 0.8912967448549094,
"f1_weighted": 0.896556411120817,
"precision": 0.8910517534139267,
"recall": 0.8916823210117144
} |
google/flan-t5-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k",
"o",
"q",
"v",
"wi_0",
"wi_1",
"wo"
] | 230,324,189 | 6,820,048 | 16 | 2726.74 | 1731.62 | {
"accuracy": 0.7667562440720834,
"f1_macro": 0.6781852408077077,
"f1_weighted": 0.7409036527368469,
"precision": 0.7470279056362396,
"recall": 0.702489786974115
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 518,246,669 | 110,890,240 | 128 | 1868.57 | 1263.44 | {
"accuracy": 0.8917167246285173,
"f1_macro": 0.8851692432395832,
"f1_weighted": 0.8918644725757472,
"precision": 0.8839875483112685,
"recall": 0.8865963690779248
} |
RUCAIBox/mvp | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
... | [
"k_proj",
"q_proj",
"v_proj"
] | 546,558,221 | 139,201,792 | 256 | 2060.56 | 1267.53 | {
"accuracy": 0.8906892190957951,
"f1_macro": 0.8846087919705622,
"f1_weighted": 0.8907712876699946,
"precision": 0.8834965546483843,
"recall": 0.8859103010451855
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.