Dataset Viewer

| results (dict) | versions (dict) | config_general (dict) |
|---|---|---|

Each row below is a single evaluation run; its three cells are shown in order: results, versions, config_general.

results
	{
  "harness|ko_arc_challenge|25": {
    "acc": 0.2790102389078498,
    "acc_stderr": 0.013106784883601355,
    "acc_norm": 0.3455631399317406,
    "acc_norm_stderr": 0.01389693846114568
  },
  "harness|ko_hellaswag|10": {
    "acc": 0.33808006373232424,
    "acc_stderr": 0.004720891597174718,
    "acc_norm": 0.45120493925512845,
    "acc_norm_stderr": 0.0049659636472103195
  },
  "harness|ko_mmlu_world_religions|5": {
    "acc": 0.34502923976608185,
    "acc_stderr": 0.036459813773888065,
    "acc_norm": 0.34502923976608185,
    "acc_norm_stderr": 0.036459813773888065
  },
  "harness|ko_mmlu_management|5": {
    "acc": 0.21359223300970873,
    "acc_stderr": 0.04058042015646034,
    "acc_norm": 0.21359223300970873,
    "acc_norm_stderr": 0.04058042015646034
  },
  "harness|ko_mmlu_miscellaneous|5": {
    "acc": 0.31545338441890164,
    "acc_stderr": 0.016617501738763394,
    "acc_norm": 0.31545338441890164,
    "acc_norm_stderr": 0.016617501738763394
  },
  "harness|ko_mmlu_anatomy|5": {
    "acc": 0.3851851851851852,
    "acc_stderr": 0.042039210401562783,
    "acc_norm": 0.3851851851851852,
    "acc_norm_stderr": 0.042039210401562783
  },
  "harness|ko_mmlu_abstract_algebra|5": {
    "acc": 0.32,
    "acc_stderr": 0.046882617226215034,
    "acc_norm": 0.32,
    "acc_norm_stderr": 0.046882617226215034
  },
  "harness|ko_mmlu_conceptual_physics|5": {
    "acc": 0.3829787234042553,
    "acc_stderr": 0.03177821250236922,
    "acc_norm": 0.3829787234042553,
    "acc_norm_stderr": 0.03177821250236922
  },
  "harness|ko_mmlu_virology|5": {
    "acc": 0.2710843373493976,
    "acc_stderr": 0.03460579907553027,
    "acc_norm": 0.2710843373493976,
    "acc_norm_stderr": 0.03460579907553027
  },
  "harness|ko_mmlu_philosophy|5": {
    "acc": 0.31189710610932475,
    "acc_stderr": 0.026311858071854155,
    "acc_norm": 0.31189710610932475,
    "acc_norm_stderr": 0.026311858071854155
  },
  "harness|ko_mmlu_human_aging|5": {
    "acc": 0.3811659192825112,
    "acc_stderr": 0.03259625118416827,
    "acc_norm": 0.3811659192825112,
    "acc_norm_stderr": 0.03259625118416827
  },
  "harness|ko_mmlu_human_sexuality|5": {
    "acc": 0.3053435114503817,
    "acc_stderr": 0.04039314978724562,
    "acc_norm": 0.3053435114503817,
    "acc_norm_stderr": 0.04039314978724562
  },
  "harness|ko_mmlu_medical_genetics|5": {
    "acc": 0.25,
    "acc_stderr": 0.04351941398892446,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.04351941398892446
  },
  "harness|ko_mmlu_high_school_geography|5": {
    "acc": 0.25252525252525254,
    "acc_stderr": 0.030954055470365907,
    "acc_norm": 0.25252525252525254,
    "acc_norm_stderr": 0.030954055470365907
  },
  "harness|ko_mmlu_electrical_engineering|5": {
    "acc": 0.296551724137931,
    "acc_stderr": 0.038061426873099935,
    "acc_norm": 0.296551724137931,
    "acc_norm_stderr": 0.038061426873099935
  },
  "harness|ko_mmlu_college_physics|5": {
    "acc": 0.24509803921568626,
    "acc_stderr": 0.04280105837364396,
    "acc_norm": 0.24509803921568626,
    "acc_norm_stderr": 0.04280105837364396
  },
  "harness|ko_mmlu_high_school_microeconomics|5": {
    "acc": 0.25210084033613445,
    "acc_stderr": 0.028205545033277723,
    "acc_norm": 0.25210084033613445,
    "acc_norm_stderr": 0.028205545033277723
  },
  "harness|ko_mmlu_high_school_macroeconomics|5": {
    "acc": 0.2358974358974359,
    "acc_stderr": 0.021525965407408726,
    "acc_norm": 0.2358974358974359,
    "acc_norm_stderr": 0.021525965407408726
  },
  "harness|ko_mmlu_computer_security|5": {
    "acc": 0.35,
    "acc_stderr": 0.047937248544110196,
    "acc_norm": 0.35,
    "acc_norm_stderr": 0.047937248544110196
  },
  "harness|ko_mmlu_global_facts|5": {
    "acc": 0.25,
    "acc_stderr": 0.04351941398892446,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.04351941398892446
  },
  "harness|ko_mmlu_jurisprudence|5": {
    "acc": 0.2962962962962963,
    "acc_stderr": 0.04414343666854932,
    "acc_norm": 0.2962962962962963,
    "acc_norm_stderr": 0.04414343666854932
  },
  "harness|ko_mmlu_high_school_chemistry|5": {
    "acc": 0.22167487684729065,
    "acc_stderr": 0.0292255758924896,
    "acc_norm": 0.22167487684729065,
    "acc_norm_stderr": 0.0292255758924896
  },
  "harness|ko_mmlu_high_school_biology|5": {
    "acc": 0.2903225806451613,
    "acc_stderr": 0.025822106119415895,
    "acc_norm": 0.2903225806451613,
    "acc_norm_stderr": 0.025822106119415895
  },
  "harness|ko_mmlu_marketing|5": {
    "acc": 0.3247863247863248,
    "acc_stderr": 0.03067902276549883,
    "acc_norm": 0.3247863247863248,
    "acc_norm_stderr": 0.03067902276549883
  },
  "harness|ko_mmlu_clinical_knowledge|5": {
    "acc": 0.29056603773584905,
    "acc_stderr": 0.02794321998933716,
    "acc_norm": 0.29056603773584905,
    "acc_norm_stderr": 0.02794321998933716
  },
  "harness|ko_mmlu_public_relations|5": {
    "acc": 0.2545454545454545,
    "acc_stderr": 0.04172343038705383,
    "acc_norm": 0.2545454545454545,
    "acc_norm_stderr": 0.04172343038705383
  },
  "harness|ko_mmlu_high_school_mathematics|5": {
    "acc": 0.27037037037037037,
    "acc_stderr": 0.027080372815145668,
    "acc_norm": 0.27037037037037037,
    "acc_norm_stderr": 0.027080372815145668
  },
  "harness|ko_mmlu_high_school_physics|5": {
    "acc": 0.2781456953642384,
    "acc_stderr": 0.03658603262763744,
    "acc_norm": 0.2781456953642384,
    "acc_norm_stderr": 0.03658603262763744
  },
  "harness|ko_mmlu_sociology|5": {
    "acc": 0.3333333333333333,
    "acc_stderr": 0.03333333333333334,
    "acc_norm": 0.3333333333333333,
    "acc_norm_stderr": 0.03333333333333334
  },
  "harness|ko_mmlu_college_medicine|5": {
    "acc": 0.2138728323699422,
    "acc_stderr": 0.03126511206173042,
    "acc_norm": 0.2138728323699422,
    "acc_norm_stderr": 0.03126511206173042
  },
  "harness|ko_mmlu_elementary_mathematics|5": {
    "acc": 0.26455026455026454,
    "acc_stderr": 0.022717467897708614,
    "acc_norm": 0.26455026455026454,
    "acc_norm_stderr": 0.022717467897708614
  },
  "harness|ko_mmlu_college_biology|5": {
    "acc": 0.2847222222222222,
    "acc_stderr": 0.03773809990686935,
    "acc_norm": 0.2847222222222222,
    "acc_norm_stderr": 0.03773809990686935
  },
  "harness|ko_mmlu_college_chemistry|5": {
    "acc": 0.22,
    "acc_stderr": 0.04163331998932269,
    "acc_norm": 0.22,
    "acc_norm_stderr": 0.04163331998932269
  },
  "harness|ko_mmlu_us_foreign_policy|5": {
    "acc": 0.44,
    "acc_stderr": 0.04988876515698589,
    "acc_norm": 0.44,
    "acc_norm_stderr": 0.04988876515698589
  },
  "harness|ko_mmlu_moral_disputes|5": {
    "acc": 0.2947976878612717,
    "acc_stderr": 0.024547617794803828,
    "acc_norm": 0.2947976878612717,
    "acc_norm_stderr": 0.024547617794803828
  },
  "harness|ko_mmlu_logical_fallacies|5": {
    "acc": 0.2822085889570552,
    "acc_stderr": 0.03536117886664743,
    "acc_norm": 0.2822085889570552,
    "acc_norm_stderr": 0.03536117886664743
  },
  "harness|ko_mmlu_prehistory|5": {
    "acc": 0.2839506172839506,
    "acc_stderr": 0.025089478523765127,
    "acc_norm": 0.2839506172839506,
    "acc_norm_stderr": 0.025089478523765127
  },
  "harness|ko_mmlu_college_mathematics|5": {
    "acc": 0.25,
    "acc_stderr": 0.04351941398892446,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.04351941398892446
  },
  "harness|ko_mmlu_high_school_government_and_politics|5": {
    "acc": 0.27461139896373055,
    "acc_stderr": 0.032210245080411516,
    "acc_norm": 0.27461139896373055,
    "acc_norm_stderr": 0.032210245080411516
  },
  "harness|ko_mmlu_econometrics|5": {
    "acc": 0.2543859649122807,
    "acc_stderr": 0.040969851398436716,
    "acc_norm": 0.2543859649122807,
    "acc_norm_stderr": 0.040969851398436716
  },
  "harness|ko_mmlu_high_school_psychology|5": {
    "acc": 0.25137614678899084,
    "acc_stderr": 0.018599206360287415,
    "acc_norm": 0.25137614678899084,
    "acc_norm_stderr": 0.018599206360287415
  },
  "harness|ko_mmlu_formal_logic|5": {
    "acc": 0.19047619047619047,
    "acc_stderr": 0.03512207412302052,
    "acc_norm": 0.19047619047619047,
    "acc_norm_stderr": 0.03512207412302052
  },
  "harness|ko_mmlu_nutrition|5": {
    "acc": 0.29411764705882354,
    "acc_stderr": 0.026090162504279035,
    "acc_norm": 0.29411764705882354,
    "acc_norm_stderr": 0.026090162504279035
  },
  "harness|ko_mmlu_business_ethics|5": {
    "acc": 0.4,
    "acc_stderr": 0.049236596391733084,
    "acc_norm": 0.4,
    "acc_norm_stderr": 0.049236596391733084
  },
  "harness|ko_mmlu_international_law|5": {
    "acc": 0.2975206611570248,
    "acc_stderr": 0.04173349148083499,
    "acc_norm": 0.2975206611570248,
    "acc_norm_stderr": 0.04173349148083499
  },
  "harness|ko_mmlu_astronomy|5": {
    "acc": 0.17105263157894737,
    "acc_stderr": 0.030643607071677098,
    "acc_norm": 0.17105263157894737,
    "acc_norm_stderr": 0.030643607071677098
  },
  "harness|ko_mmlu_professional_psychology|5": {
    "acc": 0.3022875816993464,
    "acc_stderr": 0.018579232711113874,
    "acc_norm": 0.3022875816993464,
    "acc_norm_stderr": 0.018579232711113874
  },
  "harness|ko_mmlu_professional_accounting|5": {
    "acc": 0.2765957446808511,
    "acc_stderr": 0.026684564340461014,
    "acc_norm": 0.2765957446808511,
    "acc_norm_stderr": 0.026684564340461014
  },
  "harness|ko_mmlu_machine_learning|5": {
    "acc": 0.21428571428571427,
    "acc_stderr": 0.03894641120044793,
    "acc_norm": 0.21428571428571427,
    "acc_norm_stderr": 0.03894641120044793
  },
  "harness|ko_mmlu_high_school_statistics|5": {
    "acc": 0.25925925925925924,
    "acc_stderr": 0.029886910547626978,
    "acc_norm": 0.25925925925925924,
    "acc_norm_stderr": 0.029886910547626978
  },
  "harness|ko_mmlu_moral_scenarios|5": {
    "acc": 0.2435754189944134,
    "acc_stderr": 0.014355911964767857,
    "acc_norm": 0.2435754189944134,
    "acc_norm_stderr": 0.014355911964767857
  },
  "harness|ko_mmlu_college_computer_science|5": {
    "acc": 0.31,
    "acc_stderr": 0.04648231987117316,
    "acc_norm": 0.31,
    "acc_norm_stderr": 0.04648231987117316
  },
  "harness|ko_mmlu_high_school_computer_science|5": {
    "acc": 0.34,
    "acc_stderr": 0.04760952285695236,
    "acc_norm": 0.34,
    "acc_norm_stderr": 0.04760952285695236
  },
  "harness|ko_mmlu_professional_medicine|5": {
    "acc": 0.1801470588235294,
    "acc_stderr": 0.02334516361654488,
    "acc_norm": 0.1801470588235294,
    "acc_norm_stderr": 0.02334516361654488
  },
  "harness|ko_mmlu_security_studies|5": {
    "acc": 0.24489795918367346,
    "acc_stderr": 0.027529637440174934,
    "acc_norm": 0.24489795918367346,
    "acc_norm_stderr": 0.027529637440174934
  },
  "harness|ko_mmlu_high_school_world_history|5": {
    "acc": 0.33755274261603374,
    "acc_stderr": 0.030781549102026216,
    "acc_norm": 0.33755274261603374,
    "acc_norm_stderr": 0.030781549102026216
  },
  "harness|ko_mmlu_professional_law|5": {
    "acc": 0.26401564537157757,
    "acc_stderr": 0.011258435537723814,
    "acc_norm": 0.26401564537157757,
    "acc_norm_stderr": 0.011258435537723814
  },
  "harness|ko_mmlu_high_school_us_history|5": {
    "acc": 0.22058823529411764,
    "acc_stderr": 0.02910225438967407,
    "acc_norm": 0.22058823529411764,
    "acc_norm_stderr": 0.02910225438967407
  },
  "harness|ko_mmlu_high_school_european_history|5": {
    "acc": 0.24242424242424243,
    "acc_stderr": 0.03346409881055952,
    "acc_norm": 0.24242424242424243,
    "acc_norm_stderr": 0.03346409881055952
  },
  "harness|ko_truthfulqa_mc|0": {
    "mc1": 0.25703794369645044,
    "mc1_stderr": 0.015298077509485083,
    "mc2": 0.42530376345187815,
    "mc2_stderr": 0.015252754425393767
  },
  "harness|ko_commongen_v2|2": {
    "acc": 0.15584415584415584,
    "acc_stderr": 0.012470141877923077,
    "acc_norm": 0.3577331759149941,
    "acc_norm_stderr": 0.016479808935749976
  }
} 
versions
	{
  "all": 0,
  "harness|ko_arc_challenge|25": 0,
  "harness|ko_hellaswag|10": 0,
  "harness|ko_mmlu_world_religions|5": 1,
  "harness|ko_mmlu_management|5": 1,
  "harness|ko_mmlu_miscellaneous|5": 1,
  "harness|ko_mmlu_anatomy|5": 1,
  "harness|ko_mmlu_abstract_algebra|5": 1,
  "harness|ko_mmlu_conceptual_physics|5": 1,
  "harness|ko_mmlu_virology|5": 1,
  "harness|ko_mmlu_philosophy|5": 1,
  "harness|ko_mmlu_human_aging|5": 1,
  "harness|ko_mmlu_human_sexuality|5": 1,
  "harness|ko_mmlu_medical_genetics|5": 1,
  "harness|ko_mmlu_high_school_geography|5": 1,
  "harness|ko_mmlu_electrical_engineering|5": 1,
  "harness|ko_mmlu_college_physics|5": 1,
  "harness|ko_mmlu_high_school_microeconomics|5": 1,
  "harness|ko_mmlu_high_school_macroeconomics|5": 1,
  "harness|ko_mmlu_computer_security|5": 1,
  "harness|ko_mmlu_global_facts|5": 1,
  "harness|ko_mmlu_jurisprudence|5": 1,
  "harness|ko_mmlu_high_school_chemistry|5": 1,
  "harness|ko_mmlu_high_school_biology|5": 1,
  "harness|ko_mmlu_marketing|5": 1,
  "harness|ko_mmlu_clinical_knowledge|5": 1,
  "harness|ko_mmlu_public_relations|5": 1,
  "harness|ko_mmlu_high_school_mathematics|5": 1,
  "harness|ko_mmlu_high_school_physics|5": 1,
  "harness|ko_mmlu_sociology|5": 1,
  "harness|ko_mmlu_college_medicine|5": 1,
  "harness|ko_mmlu_elementary_mathematics|5": 1,
  "harness|ko_mmlu_college_biology|5": 1,
  "harness|ko_mmlu_college_chemistry|5": 1,
  "harness|ko_mmlu_us_foreign_policy|5": 1,
  "harness|ko_mmlu_moral_disputes|5": 1,
  "harness|ko_mmlu_logical_fallacies|5": 1,
  "harness|ko_mmlu_prehistory|5": 1,
  "harness|ko_mmlu_college_mathematics|5": 1,
  "harness|ko_mmlu_high_school_government_and_politics|5": 1,
  "harness|ko_mmlu_econometrics|5": 1,
  "harness|ko_mmlu_high_school_psychology|5": 1,
  "harness|ko_mmlu_formal_logic|5": 1,
  "harness|ko_mmlu_nutrition|5": 1,
  "harness|ko_mmlu_business_ethics|5": 1,
  "harness|ko_mmlu_international_law|5": 1,
  "harness|ko_mmlu_astronomy|5": 1,
  "harness|ko_mmlu_professional_psychology|5": 1,
  "harness|ko_mmlu_professional_accounting|5": 1,
  "harness|ko_mmlu_machine_learning|5": 1,
  "harness|ko_mmlu_high_school_statistics|5": 1,
  "harness|ko_mmlu_moral_scenarios|5": 1,
  "harness|ko_mmlu_college_computer_science|5": 1,
  "harness|ko_mmlu_high_school_computer_science|5": 1,
  "harness|ko_mmlu_professional_medicine|5": 1,
  "harness|ko_mmlu_security_studies|5": 1,
  "harness|ko_mmlu_high_school_world_history|5": 1,
  "harness|ko_mmlu_professional_law|5": 1,
  "harness|ko_mmlu_high_school_us_history|5": 1,
  "harness|ko_mmlu_high_school_european_history|5": 1,
  "harness|ko_truthfulqa_mc|0": 0,
  "harness|ko_commongen_v2|2": 1
} 
config_general
	{
  "model_name": "beomi/llama-2-ko-7b-emb-dev",
  "model_sha": "f1ff977bd4ee3f0c2a3ee7dd1c4b7750e3a0766c",
  "model_dtype": "torch.float16",
  "lighteval_sha": "",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null
} 
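For reference, a minimal sketch in Python showing how a row like the one above can be parsed and summarized, for example by macro-averaging `acc_norm` over the tasks that report it. It assumes the results and config_general cells have been saved locally as `results.json` and `config_general.json`; those file names are hypothetical, not part of the dataset.

```python
import json

# Load one row of the table: per-task metrics and the run configuration.
# File names are hypothetical; adjust to wherever the JSON was saved.
with open("results.json") as f:
    results = json.load(f)
with open("config_general.json") as f:
    config = json.load(f)

# Macro-average acc_norm over tasks that report it
# (harness|ko_truthfulqa_mc|0 reports mc1/mc2 instead and is skipped).
scores = [m["acc_norm"] for m in results.values() if "acc_norm" in m]
macro_acc_norm = sum(scores) / len(scores)

print(config["model_name"], config["model_sha"][:8])
print(f"macro acc_norm over {len(scores)} tasks: {macro_acc_norm:.4f}")
```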
---

results
	{
  "harness|ko_arc_challenge|25": {
    "acc": 0.30204778156996587,
    "acc_stderr": 0.013417519144716429,
    "acc_norm": 0.378839590443686,
    "acc_norm_stderr": 0.014175915490000324
  },
  "harness|ko_hellaswag|10": {
    "acc": 0.35480979884485164,
    "acc_stderr": 0.0047747781803451845,
    "acc_norm": 0.47390957976498704,
    "acc_norm_stderr": 0.0049829835924591935
  },
  "harness|ko_mmlu_world_religions|5": {
    "acc": 0.24561403508771928,
    "acc_stderr": 0.033014059469872487,
    "acc_norm": 0.24561403508771928,
    "acc_norm_stderr": 0.033014059469872487
  },
  "harness|ko_mmlu_management|5": {
    "acc": 0.24271844660194175,
    "acc_stderr": 0.04245022486384493,
    "acc_norm": 0.24271844660194175,
    "acc_norm_stderr": 0.04245022486384493
  },
  "harness|ko_mmlu_miscellaneous|5": {
    "acc": 0.3269476372924649,
    "acc_stderr": 0.01677490818013146,
    "acc_norm": 0.3269476372924649,
    "acc_norm_stderr": 0.01677490818013146
  },
  "harness|ko_mmlu_anatomy|5": {
    "acc": 0.35555555555555557,
    "acc_stderr": 0.04135176749720386,
    "acc_norm": 0.35555555555555557,
    "acc_norm_stderr": 0.04135176749720386
  },
  "harness|ko_mmlu_abstract_algebra|5": {
    "acc": 0.32,
    "acc_stderr": 0.046882617226215034,
    "acc_norm": 0.32,
    "acc_norm_stderr": 0.046882617226215034
  },
  "harness|ko_mmlu_conceptual_physics|5": {
    "acc": 0.35319148936170214,
    "acc_stderr": 0.031245325202761926,
    "acc_norm": 0.35319148936170214,
    "acc_norm_stderr": 0.031245325202761926
  },
  "harness|ko_mmlu_virology|5": {
    "acc": 0.3855421686746988,
    "acc_stderr": 0.03789134424611548,
    "acc_norm": 0.3855421686746988,
    "acc_norm_stderr": 0.03789134424611548
  },
  "harness|ko_mmlu_philosophy|5": {
    "acc": 0.3215434083601286,
    "acc_stderr": 0.026527724079528872,
    "acc_norm": 0.3215434083601286,
    "acc_norm_stderr": 0.026527724079528872
  },
  "harness|ko_mmlu_human_aging|5": {
    "acc": 0.30493273542600896,
    "acc_stderr": 0.03089861088247751,
    "acc_norm": 0.30493273542600896,
    "acc_norm_stderr": 0.03089861088247751
  },
  "harness|ko_mmlu_human_sexuality|5": {
    "acc": 0.2748091603053435,
    "acc_stderr": 0.039153454088478354,
    "acc_norm": 0.2748091603053435,
    "acc_norm_stderr": 0.039153454088478354
  },
  "harness|ko_mmlu_medical_genetics|5": {
    "acc": 0.24,
    "acc_stderr": 0.04292346959909282,
    "acc_norm": 0.24,
    "acc_norm_stderr": 0.04292346959909282
  },
  "harness|ko_mmlu_high_school_geography|5": {
    "acc": 0.29292929292929293,
    "acc_stderr": 0.03242497958178817,
    "acc_norm": 0.29292929292929293,
    "acc_norm_stderr": 0.03242497958178817
  },
  "harness|ko_mmlu_electrical_engineering|5": {
    "acc": 0.2896551724137931,
    "acc_stderr": 0.03780019230438013,
    "acc_norm": 0.2896551724137931,
    "acc_norm_stderr": 0.03780019230438013
  },
  "harness|ko_mmlu_college_physics|5": {
    "acc": 0.20588235294117646,
    "acc_stderr": 0.04023382273617746,
    "acc_norm": 0.20588235294117646,
    "acc_norm_stderr": 0.04023382273617746
  },
  "harness|ko_mmlu_high_school_microeconomics|5": {
    "acc": 0.3403361344537815,
    "acc_stderr": 0.03077805742293167,
    "acc_norm": 0.3403361344537815,
    "acc_norm_stderr": 0.03077805742293167
  },
  "harness|ko_mmlu_high_school_macroeconomics|5": {
    "acc": 0.3,
    "acc_stderr": 0.023234581088428494,
    "acc_norm": 0.3,
    "acc_norm_stderr": 0.023234581088428494
  },
  "harness|ko_mmlu_computer_security|5": {
    "acc": 0.31,
    "acc_stderr": 0.046482319871173156,
    "acc_norm": 0.31,
    "acc_norm_stderr": 0.046482319871173156
  },
  "harness|ko_mmlu_global_facts|5": {
    "acc": 0.33,
    "acc_stderr": 0.047258156262526045,
    "acc_norm": 0.33,
    "acc_norm_stderr": 0.047258156262526045
  },
  "harness|ko_mmlu_jurisprudence|5": {
    "acc": 0.2777777777777778,
    "acc_stderr": 0.043300437496507416,
    "acc_norm": 0.2777777777777778,
    "acc_norm_stderr": 0.043300437496507416
  },
  "harness|ko_mmlu_high_school_chemistry|5": {
    "acc": 0.3251231527093596,
    "acc_stderr": 0.03295797566311271,
    "acc_norm": 0.3251231527093596,
    "acc_norm_stderr": 0.03295797566311271
  },
  "harness|ko_mmlu_high_school_biology|5": {
    "acc": 0.3193548387096774,
    "acc_stderr": 0.026522709674667768,
    "acc_norm": 0.3193548387096774,
    "acc_norm_stderr": 0.026522709674667768
  },
  "harness|ko_mmlu_marketing|5": {
    "acc": 0.3034188034188034,
    "acc_stderr": 0.03011821010694266,
    "acc_norm": 0.3034188034188034,
    "acc_norm_stderr": 0.03011821010694266
  },
  "harness|ko_mmlu_clinical_knowledge|5": {
    "acc": 0.2830188679245283,
    "acc_stderr": 0.0277242364927009,
    "acc_norm": 0.2830188679245283,
    "acc_norm_stderr": 0.0277242364927009
  },
  "harness|ko_mmlu_public_relations|5": {
    "acc": 0.3181818181818182,
    "acc_stderr": 0.04461272175910508,
    "acc_norm": 0.3181818181818182,
    "acc_norm_stderr": 0.04461272175910508
  },
  "harness|ko_mmlu_high_school_mathematics|5": {
    "acc": 0.29259259259259257,
    "acc_stderr": 0.02773896963217609,
    "acc_norm": 0.29259259259259257,
    "acc_norm_stderr": 0.02773896963217609
  },
  "harness|ko_mmlu_high_school_physics|5": {
    "acc": 0.304635761589404,
    "acc_stderr": 0.03757949922943343,
    "acc_norm": 0.304635761589404,
    "acc_norm_stderr": 0.03757949922943343
  },
  "harness|ko_mmlu_sociology|5": {
    "acc": 0.3880597014925373,
    "acc_stderr": 0.0344578996436275,
    "acc_norm": 0.3880597014925373,
    "acc_norm_stderr": 0.0344578996436275
  },
  "harness|ko_mmlu_college_medicine|5": {
    "acc": 0.24277456647398843,
    "acc_stderr": 0.0326926380614177,
    "acc_norm": 0.24277456647398843,
    "acc_norm_stderr": 0.0326926380614177
  },
  "harness|ko_mmlu_elementary_mathematics|5": {
    "acc": 0.2804232804232804,
    "acc_stderr": 0.023135287974325635,
    "acc_norm": 0.2804232804232804,
    "acc_norm_stderr": 0.023135287974325635
  },
  "harness|ko_mmlu_college_biology|5": {
    "acc": 0.2777777777777778,
    "acc_stderr": 0.037455547914624576,
    "acc_norm": 0.2777777777777778,
    "acc_norm_stderr": 0.037455547914624576
  },
  "harness|ko_mmlu_college_chemistry|5": {
    "acc": 0.22,
    "acc_stderr": 0.041633319989322695,
    "acc_norm": 0.22,
    "acc_norm_stderr": 0.041633319989322695
  },
  "harness|ko_mmlu_us_foreign_policy|5": {
    "acc": 0.34,
    "acc_stderr": 0.047609522856952365,
    "acc_norm": 0.34,
    "acc_norm_stderr": 0.047609522856952365
  },
  "harness|ko_mmlu_moral_disputes|5": {
    "acc": 0.26878612716763006,
    "acc_stderr": 0.023868003262500104,
    "acc_norm": 0.26878612716763006,
    "acc_norm_stderr": 0.023868003262500104
  },
  "harness|ko_mmlu_logical_fallacies|5": {
    "acc": 0.26993865030674846,
    "acc_stderr": 0.03487825168497892,
    "acc_norm": 0.26993865030674846,
    "acc_norm_stderr": 0.03487825168497892
  },
  "harness|ko_mmlu_prehistory|5": {
    "acc": 0.3117283950617284,
    "acc_stderr": 0.025773111169630453,
    "acc_norm": 0.3117283950617284,
    "acc_norm_stderr": 0.025773111169630453
  },
  "harness|ko_mmlu_college_mathematics|5": {
    "acc": 0.27,
    "acc_stderr": 0.044619604333847394,
    "acc_norm": 0.27,
    "acc_norm_stderr": 0.044619604333847394
  },
  "harness|ko_mmlu_high_school_government_and_politics|5": {
    "acc": 0.26424870466321243,
    "acc_stderr": 0.03182155050916647,
    "acc_norm": 0.26424870466321243,
    "acc_norm_stderr": 0.03182155050916647
  },
  "harness|ko_mmlu_econometrics|5": {
    "acc": 0.2719298245614035,
    "acc_stderr": 0.04185774424022056,
    "acc_norm": 0.2719298245614035,
    "acc_norm_stderr": 0.04185774424022056
  },
  "harness|ko_mmlu_high_school_psychology|5": {
    "acc": 0.26055045871559634,
    "acc_stderr": 0.018819182034850068,
    "acc_norm": 0.26055045871559634,
    "acc_norm_stderr": 0.018819182034850068
  },
  "harness|ko_mmlu_formal_logic|5": {
    "acc": 0.20634920634920634,
    "acc_stderr": 0.036196045241242494,
    "acc_norm": 0.20634920634920634,
    "acc_norm_stderr": 0.036196045241242494
  },
  "harness|ko_mmlu_nutrition|5": {
    "acc": 0.31699346405228757,
    "acc_stderr": 0.026643278474508755,
    "acc_norm": 0.31699346405228757,
    "acc_norm_stderr": 0.026643278474508755
  },
  "harness|ko_mmlu_business_ethics|5": {
    "acc": 0.31,
    "acc_stderr": 0.04648231987117316,
    "acc_norm": 0.31,
    "acc_norm_stderr": 0.04648231987117316
  },
  "harness|ko_mmlu_international_law|5": {
    "acc": 0.4132231404958678,
    "acc_stderr": 0.04495087843548408,
    "acc_norm": 0.4132231404958678,
    "acc_norm_stderr": 0.04495087843548408
  },
  "harness|ko_mmlu_astronomy|5": {
    "acc": 0.19736842105263158,
    "acc_stderr": 0.03238981601699397,
    "acc_norm": 0.19736842105263158,
    "acc_norm_stderr": 0.03238981601699397
  },
  "harness|ko_mmlu_professional_psychology|5": {
    "acc": 0.2565359477124183,
    "acc_stderr": 0.017667841612379002,
    "acc_norm": 0.2565359477124183,
    "acc_norm_stderr": 0.017667841612379002
  },
  "harness|ko_mmlu_professional_accounting|5": {
    "acc": 0.2553191489361702,
    "acc_stderr": 0.026011992930902006,
    "acc_norm": 0.2553191489361702,
    "acc_norm_stderr": 0.026011992930902006
  },
  "harness|ko_mmlu_machine_learning|5": {
    "acc": 0.25,
    "acc_stderr": 0.04109974682633932,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.04109974682633932
  },
  "harness|ko_mmlu_high_school_statistics|5": {
    "acc": 0.3148148148148148,
    "acc_stderr": 0.03167468706828977,
    "acc_norm": 0.3148148148148148,
    "acc_norm_stderr": 0.03167468706828977
  },
  "harness|ko_mmlu_moral_scenarios|5": {
    "acc": 0.23575418994413408,
    "acc_stderr": 0.014196375686290803,
    "acc_norm": 0.23575418994413408,
    "acc_norm_stderr": 0.014196375686290803
  },
  "harness|ko_mmlu_college_computer_science|5": {
    "acc": 0.25,
    "acc_stderr": 0.04351941398892446,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.04351941398892446
  },
  "harness|ko_mmlu_high_school_computer_science|5": {
    "acc": 0.23,
    "acc_stderr": 0.042295258468165065,
    "acc_norm": 0.23,
    "acc_norm_stderr": 0.042295258468165065
  },
  "harness|ko_mmlu_professional_medicine|5": {
    "acc": 0.4375,
    "acc_stderr": 0.030134614954403924,
    "acc_norm": 0.4375,
    "acc_norm_stderr": 0.030134614954403924
  },
  "harness|ko_mmlu_security_studies|5": {
    "acc": 0.4204081632653061,
    "acc_stderr": 0.03160106993449604,
    "acc_norm": 0.4204081632653061,
    "acc_norm_stderr": 0.03160106993449604
  },
  "harness|ko_mmlu_high_school_world_history|5": {
    "acc": 0.2869198312236287,
    "acc_stderr": 0.029443773022594693,
    "acc_norm": 0.2869198312236287,
    "acc_norm_stderr": 0.029443773022594693
  },
  "harness|ko_mmlu_professional_law|5": {
    "acc": 0.2607561929595828,
    "acc_stderr": 0.011213471559602334,
    "acc_norm": 0.2607561929595828,
    "acc_norm_stderr": 0.011213471559602334
  },
  "harness|ko_mmlu_high_school_us_history|5": {
    "acc": 0.25,
    "acc_stderr": 0.03039153369274154,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.03039153369274154
  },
  "harness|ko_mmlu_high_school_european_history|5": {
    "acc": 0.2909090909090909,
    "acc_stderr": 0.03546563019624337,
    "acc_norm": 0.2909090909090909,
    "acc_norm_stderr": 0.03546563019624337
  },
  "harness|ko_truthfulqa_mc|0": {
    "mc1": 0.24969400244798043,
    "mc1_stderr": 0.015152286907148123,
    "mc2": 0.4175794689167079,
    "mc2_stderr": 0.01513967881843377
  },
  "harness|ko_commongen_v2|2": {
    "acc": 0.13695395513577333,
    "acc_stderr": 0.011820043946570876,
    "acc_norm": 0.33530106257378983,
    "acc_norm_stderr": 0.016230981232989817
  }
} 
versions
	{
  "all": 0,
  "harness|ko_arc_challenge|25": 0,
  "harness|ko_hellaswag|10": 0,
  "harness|ko_mmlu_world_religions|5": 1,
  "harness|ko_mmlu_management|5": 1,
  "harness|ko_mmlu_miscellaneous|5": 1,
  "harness|ko_mmlu_anatomy|5": 1,
  "harness|ko_mmlu_abstract_algebra|5": 1,
  "harness|ko_mmlu_conceptual_physics|5": 1,
  "harness|ko_mmlu_virology|5": 1,
  "harness|ko_mmlu_philosophy|5": 1,
  "harness|ko_mmlu_human_aging|5": 1,
  "harness|ko_mmlu_human_sexuality|5": 1,
  "harness|ko_mmlu_medical_genetics|5": 1,
  "harness|ko_mmlu_high_school_geography|5": 1,
  "harness|ko_mmlu_electrical_engineering|5": 1,
  "harness|ko_mmlu_college_physics|5": 1,
  "harness|ko_mmlu_high_school_microeconomics|5": 1,
  "harness|ko_mmlu_high_school_macroeconomics|5": 1,
  "harness|ko_mmlu_computer_security|5": 1,
  "harness|ko_mmlu_global_facts|5": 1,
  "harness|ko_mmlu_jurisprudence|5": 1,
  "harness|ko_mmlu_high_school_chemistry|5": 1,
  "harness|ko_mmlu_high_school_biology|5": 1,
  "harness|ko_mmlu_marketing|5": 1,
  "harness|ko_mmlu_clinical_knowledge|5": 1,
  "harness|ko_mmlu_public_relations|5": 1,
  "harness|ko_mmlu_high_school_mathematics|5": 1,
  "harness|ko_mmlu_high_school_physics|5": 1,
  "harness|ko_mmlu_sociology|5": 1,
  "harness|ko_mmlu_college_medicine|5": 1,
  "harness|ko_mmlu_elementary_mathematics|5": 1,
  "harness|ko_mmlu_college_biology|5": 1,
  "harness|ko_mmlu_college_chemistry|5": 1,
  "harness|ko_mmlu_us_foreign_policy|5": 1,
  "harness|ko_mmlu_moral_disputes|5": 1,
  "harness|ko_mmlu_logical_fallacies|5": 1,
  "harness|ko_mmlu_prehistory|5": 1,
  "harness|ko_mmlu_college_mathematics|5": 1,
  "harness|ko_mmlu_high_school_government_and_politics|5": 1,
  "harness|ko_mmlu_econometrics|5": 1,
  "harness|ko_mmlu_high_school_psychology|5": 1,
  "harness|ko_mmlu_formal_logic|5": 1,
  "harness|ko_mmlu_nutrition|5": 1,
  "harness|ko_mmlu_business_ethics|5": 1,
  "harness|ko_mmlu_international_law|5": 1,
  "harness|ko_mmlu_astronomy|5": 1,
  "harness|ko_mmlu_professional_psychology|5": 1,
  "harness|ko_mmlu_professional_accounting|5": 1,
  "harness|ko_mmlu_machine_learning|5": 1,
  "harness|ko_mmlu_high_school_statistics|5": 1,
  "harness|ko_mmlu_moral_scenarios|5": 1,
  "harness|ko_mmlu_college_computer_science|5": 1,
  "harness|ko_mmlu_high_school_computer_science|5": 1,
  "harness|ko_mmlu_professional_medicine|5": 1,
  "harness|ko_mmlu_security_studies|5": 1,
  "harness|ko_mmlu_high_school_world_history|5": 1,
  "harness|ko_mmlu_professional_law|5": 1,
  "harness|ko_mmlu_high_school_us_history|5": 1,
  "harness|ko_mmlu_high_school_european_history|5": 1,
  "harness|ko_truthfulqa_mc|0": 0,
  "harness|ko_commongen_v2|2": 1
} 
config_general
	{
  "model_name": "beomi/llama-2-ko-7b-emb-dev",
  "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684",
  "model_dtype": "torch.float16",
  "lighteval_sha": "",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null
} 
---

results
	{
  "harness|ko_arc_challenge|25": {
    "acc": 0.32593856655290104,
    "acc_stderr": 0.013697432466693239,
    "acc_norm": 0.40273037542662116,
    "acc_norm_stderr": 0.014332236306790147
  },
  "harness|ko_hellaswag|10": {
    "acc": 0.3641704839673372,
    "acc_stderr": 0.004802133511654224,
    "acc_norm": 0.49083847839075884,
    "acc_norm_stderr": 0.004988943721711207
  },
  "harness|ko_mmlu_world_religions|5": {
    "acc": 0.24561403508771928,
    "acc_stderr": 0.03301405946987249,
    "acc_norm": 0.24561403508771928,
    "acc_norm_stderr": 0.03301405946987249
  },
  "harness|ko_mmlu_management|5": {
    "acc": 0.21359223300970873,
    "acc_stderr": 0.04058042015646034,
    "acc_norm": 0.21359223300970873,
    "acc_norm_stderr": 0.04058042015646034
  },
  "harness|ko_mmlu_miscellaneous|5": {
    "acc": 0.30395913154533843,
    "acc_stderr": 0.016448321686769043,
    "acc_norm": 0.30395913154533843,
    "acc_norm_stderr": 0.016448321686769043
  },
  "harness|ko_mmlu_anatomy|5": {
    "acc": 0.3333333333333333,
    "acc_stderr": 0.04072314811876837,
    "acc_norm": 0.3333333333333333,
    "acc_norm_stderr": 0.04072314811876837
  },
  "harness|ko_mmlu_abstract_algebra|5": {
    "acc": 0.3,
    "acc_stderr": 0.046056618647183814,
    "acc_norm": 0.3,
    "acc_norm_stderr": 0.046056618647183814
  },
  "harness|ko_mmlu_conceptual_physics|5": {
    "acc": 0.32340425531914896,
    "acc_stderr": 0.03057944277361034,
    "acc_norm": 0.32340425531914896,
    "acc_norm_stderr": 0.03057944277361034
  },
  "harness|ko_mmlu_virology|5": {
    "acc": 0.2710843373493976,
    "acc_stderr": 0.03460579907553027,
    "acc_norm": 0.2710843373493976,
    "acc_norm_stderr": 0.03460579907553027
  },
  "harness|ko_mmlu_philosophy|5": {
    "acc": 0.2829581993569132,
    "acc_stderr": 0.02558306248998484,
    "acc_norm": 0.2829581993569132,
    "acc_norm_stderr": 0.02558306248998484
  },
  "harness|ko_mmlu_human_aging|5": {
    "acc": 0.4080717488789238,
    "acc_stderr": 0.03298574607842822,
    "acc_norm": 0.4080717488789238,
    "acc_norm_stderr": 0.03298574607842822
  },
  "harness|ko_mmlu_human_sexuality|5": {
    "acc": 0.2748091603053435,
    "acc_stderr": 0.039153454088478354,
    "acc_norm": 0.2748091603053435,
    "acc_norm_stderr": 0.039153454088478354
  },
  "harness|ko_mmlu_medical_genetics|5": {
    "acc": 0.27,
    "acc_stderr": 0.044619604333847394,
    "acc_norm": 0.27,
    "acc_norm_stderr": 0.044619604333847394
  },
  "harness|ko_mmlu_high_school_geography|5": {
    "acc": 0.23232323232323232,
    "acc_stderr": 0.030088629490217483,
    "acc_norm": 0.23232323232323232,
    "acc_norm_stderr": 0.030088629490217483
  },
  "harness|ko_mmlu_electrical_engineering|5": {
    "acc": 0.2482758620689655,
    "acc_stderr": 0.03600105692727774,
    "acc_norm": 0.2482758620689655,
    "acc_norm_stderr": 0.03600105692727774
  },
  "harness|ko_mmlu_college_physics|5": {
    "acc": 0.19607843137254902,
    "acc_stderr": 0.039505818611799616,
    "acc_norm": 0.19607843137254902,
    "acc_norm_stderr": 0.039505818611799616
  },
  "harness|ko_mmlu_high_school_microeconomics|5": {
    "acc": 0.2605042016806723,
    "acc_stderr": 0.028510251512341923,
    "acc_norm": 0.2605042016806723,
    "acc_norm_stderr": 0.028510251512341923
  },
  "harness|ko_mmlu_high_school_macroeconomics|5": {
    "acc": 0.26153846153846155,
    "acc_stderr": 0.02228214120420443,
    "acc_norm": 0.26153846153846155,
    "acc_norm_stderr": 0.02228214120420443
  },
  "harness|ko_mmlu_computer_security|5": {
    "acc": 0.32,
    "acc_stderr": 0.04688261722621505,
    "acc_norm": 0.32,
    "acc_norm_stderr": 0.04688261722621505
  },
  "harness|ko_mmlu_global_facts|5": {
    "acc": 0.32,
    "acc_stderr": 0.04688261722621504,
    "acc_norm": 0.32,
    "acc_norm_stderr": 0.04688261722621504
  },
  "harness|ko_mmlu_jurisprudence|5": {
    "acc": 0.3055555555555556,
    "acc_stderr": 0.044531975073749834,
    "acc_norm": 0.3055555555555556,
    "acc_norm_stderr": 0.044531975073749834
  },
  "harness|ko_mmlu_high_school_chemistry|5": {
    "acc": 0.2315270935960591,
    "acc_stderr": 0.02967833314144446,
    "acc_norm": 0.2315270935960591,
    "acc_norm_stderr": 0.02967833314144446
  },
  "harness|ko_mmlu_high_school_biology|5": {
    "acc": 0.3032258064516129,
    "acc_stderr": 0.026148685930671742,
    "acc_norm": 0.3032258064516129,
    "acc_norm_stderr": 0.026148685930671742
  },
  "harness|ko_mmlu_marketing|5": {
    "acc": 0.2692307692307692,
    "acc_stderr": 0.029058588303748845,
    "acc_norm": 0.2692307692307692,
    "acc_norm_stderr": 0.029058588303748845
  },
  "harness|ko_mmlu_clinical_knowledge|5": {
    "acc": 0.2830188679245283,
    "acc_stderr": 0.0277242364927009,
    "acc_norm": 0.2830188679245283,
    "acc_norm_stderr": 0.0277242364927009
  },
  "harness|ko_mmlu_public_relations|5": {
    "acc": 0.39090909090909093,
    "acc_stderr": 0.04673752333670237,
    "acc_norm": 0.39090909090909093,
    "acc_norm_stderr": 0.04673752333670237
  },
  "harness|ko_mmlu_high_school_mathematics|5": {
    "acc": 0.24814814814814815,
    "acc_stderr": 0.0263357394040558,
    "acc_norm": 0.24814814814814815,
    "acc_norm_stderr": 0.0263357394040558
  },
  "harness|ko_mmlu_high_school_physics|5": {
    "acc": 0.23841059602649006,
    "acc_stderr": 0.03479185572599661,
    "acc_norm": 0.23841059602649006,
    "acc_norm_stderr": 0.03479185572599661
  },
  "harness|ko_mmlu_sociology|5": {
    "acc": 0.2935323383084577,
    "acc_stderr": 0.03220024104534204,
    "acc_norm": 0.2935323383084577,
    "acc_norm_stderr": 0.03220024104534204
  },
  "harness|ko_mmlu_college_medicine|5": {
    "acc": 0.23121387283236994,
    "acc_stderr": 0.0321473730202947,
    "acc_norm": 0.23121387283236994,
    "acc_norm_stderr": 0.0321473730202947
  },
  "harness|ko_mmlu_elementary_mathematics|5": {
    "acc": 0.25396825396825395,
    "acc_stderr": 0.022418042891113942,
    "acc_norm": 0.25396825396825395,
    "acc_norm_stderr": 0.022418042891113942
  },
  "harness|ko_mmlu_college_biology|5": {
    "acc": 0.3055555555555556,
    "acc_stderr": 0.03852084696008534,
    "acc_norm": 0.3055555555555556,
    "acc_norm_stderr": 0.03852084696008534
  },
  "harness|ko_mmlu_college_chemistry|5": {
    "acc": 0.24,
    "acc_stderr": 0.04292346959909283,
    "acc_norm": 0.24,
    "acc_norm_stderr": 0.04292346959909283
  },
  "harness|ko_mmlu_us_foreign_policy|5": {
    "acc": 0.33,
    "acc_stderr": 0.047258156262526045,
    "acc_norm": 0.33,
    "acc_norm_stderr": 0.047258156262526045
  },
  "harness|ko_mmlu_moral_disputes|5": {
    "acc": 0.24566473988439305,
    "acc_stderr": 0.02317629820399201,
    "acc_norm": 0.24566473988439305,
    "acc_norm_stderr": 0.02317629820399201
  },
  "harness|ko_mmlu_logical_fallacies|5": {
    "acc": 0.27607361963190186,
    "acc_stderr": 0.03512385283705051,
    "acc_norm": 0.27607361963190186,
    "acc_norm_stderr": 0.03512385283705051
  },
  "harness|ko_mmlu_prehistory|5": {
    "acc": 0.2777777777777778,
    "acc_stderr": 0.02492200116888633,
    "acc_norm": 0.2777777777777778,
    "acc_norm_stderr": 0.02492200116888633
  },
  "harness|ko_mmlu_college_mathematics|5": {
    "acc": 0.23,
    "acc_stderr": 0.04229525846816506,
    "acc_norm": 0.23,
    "acc_norm_stderr": 0.04229525846816506
  },
  "harness|ko_mmlu_high_school_government_and_politics|5": {
    "acc": 0.20207253886010362,
    "acc_stderr": 0.02897908979429673,
    "acc_norm": 0.20207253886010362,
    "acc_norm_stderr": 0.02897908979429673
  },
  "harness|ko_mmlu_econometrics|5": {
    "acc": 0.2631578947368421,
    "acc_stderr": 0.041424397194893624,
    "acc_norm": 0.2631578947368421,
    "acc_norm_stderr": 0.041424397194893624
  },
  "harness|ko_mmlu_high_school_psychology|5": {
    "acc": 0.24220183486238533,
    "acc_stderr": 0.01836817630659862,
    "acc_norm": 0.24220183486238533,
    "acc_norm_stderr": 0.01836817630659862
  },
  "harness|ko_mmlu_formal_logic|5": {
    "acc": 0.20634920634920634,
    "acc_stderr": 0.036196045241242494,
    "acc_norm": 0.20634920634920634,
    "acc_norm_stderr": 0.036196045241242494
  },
  "harness|ko_mmlu_nutrition|5": {
    "acc": 0.27450980392156865,
    "acc_stderr": 0.02555316999182652,
    "acc_norm": 0.27450980392156865,
    "acc_norm_stderr": 0.02555316999182652
  },
  "harness|ko_mmlu_business_ethics|5": {
    "acc": 0.24,
    "acc_stderr": 0.04292346959909284,
    "acc_norm": 0.24,
    "acc_norm_stderr": 0.04292346959909284
  },
  "harness|ko_mmlu_international_law|5": {
    "acc": 0.36363636363636365,
    "acc_stderr": 0.043913262867240704,
    "acc_norm": 0.36363636363636365,
    "acc_norm_stderr": 0.043913262867240704
  },
  "harness|ko_mmlu_astronomy|5": {
    "acc": 0.18421052631578946,
    "acc_stderr": 0.0315469804508223,
    "acc_norm": 0.18421052631578946,
    "acc_norm_stderr": 0.0315469804508223
  },
  "harness|ko_mmlu_professional_psychology|5": {
    "acc": 0.2761437908496732,
    "acc_stderr": 0.018087276935663133,
    "acc_norm": 0.2761437908496732,
    "acc_norm_stderr": 0.018087276935663133
  },
  "harness|ko_mmlu_professional_accounting|5": {
    "acc": 0.26595744680851063,
    "acc_stderr": 0.026358065698880585,
    "acc_norm": 0.26595744680851063,
    "acc_norm_stderr": 0.026358065698880585
  },
  "harness|ko_mmlu_machine_learning|5": {
    "acc": 0.26785714285714285,
    "acc_stderr": 0.04203277291467764,
    "acc_norm": 0.26785714285714285,
    "acc_norm_stderr": 0.04203277291467764
  },
  "harness|ko_mmlu_high_school_statistics|5": {
    "acc": 0.2361111111111111,
    "acc_stderr": 0.028963702570791044,
    "acc_norm": 0.2361111111111111,
    "acc_norm_stderr": 0.028963702570791044
  },
  "harness|ko_mmlu_moral_scenarios|5": {
    "acc": 0.2424581005586592,
    "acc_stderr": 0.01433352205921789,
    "acc_norm": 0.2424581005586592,
    "acc_norm_stderr": 0.01433352205921789
  },
  "harness|ko_mmlu_college_computer_science|5": {
    "acc": 0.22,
    "acc_stderr": 0.04163331998932269,
    "acc_norm": 0.22,
    "acc_norm_stderr": 0.04163331998932269
  },
  "harness|ko_mmlu_high_school_computer_science|5": {
    "acc": 0.28,
    "acc_stderr": 0.04512608598542127,
    "acc_norm": 0.28,
    "acc_norm_stderr": 0.04512608598542127
  },
  "harness|ko_mmlu_professional_medicine|5": {
    "acc": 0.24632352941176472,
    "acc_stderr": 0.02617343857052,
    "acc_norm": 0.24632352941176472,
    "acc_norm_stderr": 0.02617343857052
  },
  "harness|ko_mmlu_security_studies|5": {
    "acc": 0.22040816326530613,
    "acc_stderr": 0.0265370453121453,
    "acc_norm": 0.22040816326530613,
    "acc_norm_stderr": 0.0265370453121453
  },
  "harness|ko_mmlu_high_school_world_history|5": {
    "acc": 0.31645569620253167,
    "acc_stderr": 0.03027497488021898,
    "acc_norm": 0.31645569620253167,
    "acc_norm_stderr": 0.03027497488021898
  },
  "harness|ko_mmlu_professional_law|5": {
    "acc": 0.24902216427640156,
    "acc_stderr": 0.01104489226404077,
    "acc_norm": 0.24902216427640156,
    "acc_norm_stderr": 0.01104489226404077
  },
  "harness|ko_mmlu_high_school_us_history|5": {
    "acc": 0.21568627450980393,
    "acc_stderr": 0.028867431449849313,
    "acc_norm": 0.21568627450980393,
    "acc_norm_stderr": 0.028867431449849313
  },
  "harness|ko_mmlu_high_school_european_history|5": {
    "acc": 0.24848484848484848,
    "acc_stderr": 0.03374402644139405,
    "acc_norm": 0.24848484848484848,
    "acc_norm_stderr": 0.03374402644139405
  },
  "harness|ko_truthfulqa_mc|0": {
    "mc1": 0.2558139534883721,
    "mc1_stderr": 0.015274176219283347,
    "mc2": 0.430843038646161,
    "mc2_stderr": 0.015222244438027463
  },
  "harness|ko_commongen_v2|2": {
    "acc": 0.15348288075560804,
    "acc_stderr": 0.012392606565325119,
    "acc_norm": 0.3435655253837072,
    "acc_norm_stderr": 0.016327334806429145
  }
} 
versions
	{
  "all": 0,
  "harness|ko_arc_challenge|25": 0,
  "harness|ko_hellaswag|10": 0,
  "harness|ko_mmlu_world_religions|5": 1,
  "harness|ko_mmlu_management|5": 1,
  "harness|ko_mmlu_miscellaneous|5": 1,
  "harness|ko_mmlu_anatomy|5": 1,
  "harness|ko_mmlu_abstract_algebra|5": 1,
  "harness|ko_mmlu_conceptual_physics|5": 1,
  "harness|ko_mmlu_virology|5": 1,
  "harness|ko_mmlu_philosophy|5": 1,
  "harness|ko_mmlu_human_aging|5": 1,
  "harness|ko_mmlu_human_sexuality|5": 1,
  "harness|ko_mmlu_medical_genetics|5": 1,
  "harness|ko_mmlu_high_school_geography|5": 1,
  "harness|ko_mmlu_electrical_engineering|5": 1,
  "harness|ko_mmlu_college_physics|5": 1,
  "harness|ko_mmlu_high_school_microeconomics|5": 1,
  "harness|ko_mmlu_high_school_macroeconomics|5": 1,
  "harness|ko_mmlu_computer_security|5": 1,
  "harness|ko_mmlu_global_facts|5": 1,
  "harness|ko_mmlu_jurisprudence|5": 1,
  "harness|ko_mmlu_high_school_chemistry|5": 1,
  "harness|ko_mmlu_high_school_biology|5": 1,
  "harness|ko_mmlu_marketing|5": 1,
  "harness|ko_mmlu_clinical_knowledge|5": 1,
  "harness|ko_mmlu_public_relations|5": 1,
  "harness|ko_mmlu_high_school_mathematics|5": 1,
  "harness|ko_mmlu_high_school_physics|5": 1,
  "harness|ko_mmlu_sociology|5": 1,
  "harness|ko_mmlu_college_medicine|5": 1,
  "harness|ko_mmlu_elementary_mathematics|5": 1,
  "harness|ko_mmlu_college_biology|5": 1,
  "harness|ko_mmlu_college_chemistry|5": 1,
  "harness|ko_mmlu_us_foreign_policy|5": 1,
  "harness|ko_mmlu_moral_disputes|5": 1,
  "harness|ko_mmlu_logical_fallacies|5": 1,
  "harness|ko_mmlu_prehistory|5": 1,
  "harness|ko_mmlu_college_mathematics|5": 1,
  "harness|ko_mmlu_high_school_government_and_politics|5": 1,
  "harness|ko_mmlu_econometrics|5": 1,
  "harness|ko_mmlu_high_school_psychology|5": 1,
  "harness|ko_mmlu_formal_logic|5": 1,
  "harness|ko_mmlu_nutrition|5": 1,
  "harness|ko_mmlu_business_ethics|5": 1,
  "harness|ko_mmlu_international_law|5": 1,
  "harness|ko_mmlu_astronomy|5": 1,
  "harness|ko_mmlu_professional_psychology|5": 1,
  "harness|ko_mmlu_professional_accounting|5": 1,
  "harness|ko_mmlu_machine_learning|5": 1,
  "harness|ko_mmlu_high_school_statistics|5": 1,
  "harness|ko_mmlu_moral_scenarios|5": 1,
  "harness|ko_mmlu_college_computer_science|5": 1,
  "harness|ko_mmlu_high_school_computer_science|5": 1,
  "harness|ko_mmlu_professional_medicine|5": 1,
  "harness|ko_mmlu_security_studies|5": 1,
  "harness|ko_mmlu_high_school_world_history|5": 1,
  "harness|ko_mmlu_professional_law|5": 1,
  "harness|ko_mmlu_high_school_us_history|5": 1,
  "harness|ko_mmlu_high_school_european_history|5": 1,
  "harness|ko_truthfulqa_mc|0": 0,
  "harness|ko_commongen_v2|2": 1
} 
config_general
	{
  "model_name": "beomi/llama-2-ko-7b-emb-dev",
  "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684",
  "model_dtype": "torch.float16",
  "lighteval_sha": "",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null
} 
---

results
	{
  "harness|ko_arc_challenge|25": {
    "acc": 0.3387372013651877,
    "acc_stderr": 0.013830568927974334,
    "acc_norm": 0.4197952218430034,
    "acc_norm_stderr": 0.014422181226303031
  },
  "harness|ko_hellaswag|10": {
    "acc": 0.364070902210715,
    "acc_stderr": 0.0048018528813297484,
    "acc_norm": 0.49741087432782316,
    "acc_norm_stderr": 0.004989714512282407
  },
  "harness|ko_mmlu_world_religions|5": {
    "acc": 0.21637426900584794,
    "acc_stderr": 0.03158149539338733,
    "acc_norm": 0.21637426900584794,
    "acc_norm_stderr": 0.03158149539338733
  },
  "harness|ko_mmlu_management|5": {
    "acc": 0.2621359223300971,
    "acc_stderr": 0.043546310772605956,
    "acc_norm": 0.2621359223300971,
    "acc_norm_stderr": 0.043546310772605956
  },
  "harness|ko_mmlu_miscellaneous|5": {
    "acc": 0.3052362707535121,
    "acc_stderr": 0.016467711947635123,
    "acc_norm": 0.3052362707535121,
    "acc_norm_stderr": 0.016467711947635123
  },
  "harness|ko_mmlu_anatomy|5": {
    "acc": 0.37777777777777777,
    "acc_stderr": 0.04188307537595852,
    "acc_norm": 0.37777777777777777,
    "acc_norm_stderr": 0.04188307537595852
  },
  "harness|ko_mmlu_abstract_algebra|5": {
    "acc": 0.3,
    "acc_stderr": 0.046056618647183814,
    "acc_norm": 0.3,
    "acc_norm_stderr": 0.046056618647183814
  },
  "harness|ko_mmlu_conceptual_physics|5": {
    "acc": 0.3404255319148936,
    "acc_stderr": 0.03097669299853443,
    "acc_norm": 0.3404255319148936,
    "acc_norm_stderr": 0.03097669299853443
  },
  "harness|ko_mmlu_virology|5": {
    "acc": 0.3674698795180723,
    "acc_stderr": 0.03753267402120574,
    "acc_norm": 0.3674698795180723,
    "acc_norm_stderr": 0.03753267402120574
  },
  "harness|ko_mmlu_philosophy|5": {
    "acc": 0.28938906752411575,
    "acc_stderr": 0.025755865922632924,
    "acc_norm": 0.28938906752411575,
    "acc_norm_stderr": 0.025755865922632924
  },
  "harness|ko_mmlu_human_aging|5": {
    "acc": 0.3811659192825112,
    "acc_stderr": 0.032596251184168264,
    "acc_norm": 0.3811659192825112,
    "acc_norm_stderr": 0.032596251184168264
  },
  "harness|ko_mmlu_human_sexuality|5": {
    "acc": 0.26717557251908397,
    "acc_stderr": 0.038808483010823944,
    "acc_norm": 0.26717557251908397,
    "acc_norm_stderr": 0.038808483010823944
  },
  "harness|ko_mmlu_medical_genetics|5": {
    "acc": 0.26,
    "acc_stderr": 0.04408440022768078,
    "acc_norm": 0.26,
    "acc_norm_stderr": 0.04408440022768078
  },
  "harness|ko_mmlu_high_school_geography|5": {
    "acc": 0.25757575757575757,
    "acc_stderr": 0.031156269519646826,
    "acc_norm": 0.25757575757575757,
    "acc_norm_stderr": 0.031156269519646826
  },
  "harness|ko_mmlu_electrical_engineering|5": {
    "acc": 0.27586206896551724,
    "acc_stderr": 0.037245636197746325,
    "acc_norm": 0.27586206896551724,
    "acc_norm_stderr": 0.037245636197746325
  },
  "harness|ko_mmlu_college_physics|5": {
    "acc": 0.19607843137254902,
    "acc_stderr": 0.039505818611799616,
    "acc_norm": 0.19607843137254902,
    "acc_norm_stderr": 0.039505818611799616
  },
  "harness|ko_mmlu_high_school_microeconomics|5": {
    "acc": 0.2815126050420168,
    "acc_stderr": 0.02921354941437216,
    "acc_norm": 0.2815126050420168,
    "acc_norm_stderr": 0.02921354941437216
  },
  "harness|ko_mmlu_high_school_macroeconomics|5": {
    "acc": 0.26153846153846155,
    "acc_stderr": 0.02228214120420443,
    "acc_norm": 0.26153846153846155,
    "acc_norm_stderr": 0.02228214120420443
  },
  "harness|ko_mmlu_computer_security|5": {
    "acc": 0.29,
    "acc_stderr": 0.045604802157206845,
    "acc_norm": 0.29,
    "acc_norm_stderr": 0.045604802157206845
  },
  "harness|ko_mmlu_global_facts|5": {
    "acc": 0.33,
    "acc_stderr": 0.047258156262526045,
    "acc_norm": 0.33,
    "acc_norm_stderr": 0.047258156262526045
  },
  "harness|ko_mmlu_jurisprudence|5": {
    "acc": 0.25,
    "acc_stderr": 0.04186091791394607,
    "acc_norm": 0.25,
    "acc_norm_stderr": 0.04186091791394607
  },
  "harness|ko_mmlu_high_school_chemistry|5": {
    "acc": 0.3054187192118227,
    "acc_stderr": 0.032406615658684086,
    "acc_norm": 0.3054187192118227,
    "acc_norm_stderr": 0.032406615658684086
  },
  "harness|ko_mmlu_high_school_biology|5": {
    "acc": 0.2870967741935484,
    "acc_stderr": 0.02573654274559453,
    "acc_norm": 0.2870967741935484,
    "acc_norm_stderr": 0.02573654274559453
  },
  "harness|ko_mmlu_marketing|5": {
    "acc": 0.2948717948717949,
    "acc_stderr": 0.029872577708891165,
    "acc_norm": 0.2948717948717949,
    "acc_norm_stderr": 0.029872577708891165
  },
  "harness|ko_mmlu_clinical_knowledge|5": {
    "acc": 0.2981132075471698,
    "acc_stderr": 0.028152837942493875,
    "acc_norm": 0.2981132075471698,
    "acc_norm_stderr": 0.028152837942493875
  },
  "harness|ko_mmlu_public_relations|5": {
    "acc": 0.3,
    "acc_stderr": 0.04389311454644286,
    "acc_norm": 0.3,
    "acc_norm_stderr": 0.04389311454644286
  },
  "harness|ko_mmlu_high_school_mathematics|5": {
    "acc": 0.2814814814814815,
    "acc_stderr": 0.02742001935094527,
    "acc_norm": 0.2814814814814815,
    "acc_norm_stderr": 0.02742001935094527
  },
  "harness|ko_mmlu_high_school_physics|5": {
    "acc": 0.23178807947019867,
    "acc_stderr": 0.034454062719870546,
    "acc_norm": 0.23178807947019867,
    "acc_norm_stderr": 0.034454062719870546
  },
  "harness|ko_mmlu_sociology|5": {
    "acc": 0.34328358208955223,
    "acc_stderr": 0.03357379665433431,
    "acc_norm": 0.34328358208955223,
    "acc_norm_stderr": 0.03357379665433431
  },
  "harness|ko_mmlu_college_medicine|5": {
    "acc": 0.24277456647398843,
    "acc_stderr": 0.0326926380614177,
    "acc_norm": 0.24277456647398843,
    "acc_norm_stderr": 0.0326926380614177
  },
  "harness|ko_mmlu_elementary_mathematics|5": {
    "acc": 0.25396825396825395,
    "acc_stderr": 0.022418042891113942,
    "acc_norm": 0.25396825396825395,
    "acc_norm_stderr": 0.022418042891113942
  },
  "harness|ko_mmlu_college_biology|5": {
    "acc": 0.3333333333333333,
    "acc_stderr": 0.039420826399272135,
    "acc_norm": 0.3333333333333333,
    "acc_norm_stderr": 0.039420826399272135
  },
  "harness|ko_mmlu_college_chemistry|5": {
    "acc": 0.24,
    "acc_stderr": 0.04292346959909283,
    "acc_norm": 0.24,
    "acc_norm_stderr": 0.04292346959909283
  },
  "harness|ko_mmlu_us_foreign_policy|5": {
    "acc": 0.35,
    "acc_stderr": 0.0479372485441102,
    "acc_norm": 0.35,
    "acc_norm_stderr": 0.0479372485441102
  },
  "harness|ko_mmlu_moral_disputes|5": {
    "acc": 0.27167630057803466,
    "acc_stderr": 0.02394851290546836,
    "acc_norm": 0.27167630057803466,
    "acc_norm_stderr": 0.02394851290546836
  },
  "harness|ko_mmlu_logical_fallacies|5": {
    "acc": 0.25766871165644173,
    "acc_stderr": 0.03436150827846917,
    "acc_norm": 0.25766871165644173,
    "acc_norm_stderr": 0.03436150827846917
  },
  "harness|ko_mmlu_prehistory|5": {
    "acc": 0.28703703703703703,
    "acc_stderr": 0.025171041915309684,
    "acc_norm": 0.28703703703703703,
    "acc_norm_stderr": 0.025171041915309684
  },
  "harness|ko_mmlu_college_mathematics|5": {
    "acc": 0.31,
    "acc_stderr": 0.04648231987117316,
    "acc_norm": 0.31,
    "acc_norm_stderr": 0.04648231987117316
  },
  "harness|ko_mmlu_high_school_government_and_politics|5": {
    "acc": 0.21761658031088082,
    "acc_stderr": 0.029778663037752954,
    "acc_norm": 0.21761658031088082,
    "acc_norm_stderr": 0.029778663037752954
  },
  "harness|ko_mmlu_econometrics|5": {
    "acc": 0.24561403508771928,
    "acc_stderr": 0.04049339297748142,
    "acc_norm": 0.24561403508771928,
    "acc_norm_stderr": 0.04049339297748142
  },
  "harness|ko_mmlu_high_school_psychology|5": {
    "acc": 0.26605504587155965,
    "acc_stderr": 0.018946022322225597,
    "acc_norm": 0.26605504587155965,
    "acc_norm_stderr": 0.018946022322225597
  },
  "harness|ko_mmlu_formal_logic|5": {
    "acc": 0.21428571428571427,
    "acc_stderr": 0.03670066451047182,
    "acc_norm": 0.21428571428571427,
    "acc_norm_stderr": 0.03670066451047182
  },
  "harness|ko_mmlu_nutrition|5": {
    "acc": 0.2908496732026144,
    "acc_stderr": 0.026004800363952113,
    "acc_norm": 0.2908496732026144,
    "acc_norm_stderr": 0.026004800363952113
  },
  "harness|ko_mmlu_business_ethics|5": {
    "acc": 0.26,
    "acc_stderr": 0.044084400227680794,
    "acc_norm": 0.26,
    "acc_norm_stderr": 0.044084400227680794
  },
  "harness|ko_mmlu_international_law|5": {
    "acc": 0.39669421487603307,
    "acc_stderr": 0.044658697805310094,
    "acc_norm": 0.39669421487603307,
    "acc_norm_stderr": 0.044658697805310094
  },
  "harness|ko_mmlu_astronomy|5": {
    "acc": 0.19078947368421054,
    "acc_stderr": 0.031975658210325,
    "acc_norm": 0.19078947368421054,
    "acc_norm_stderr": 0.031975658210325
  },
  "harness|ko_mmlu_professional_psychology|5": {
    "acc": 0.25326797385620914,
    "acc_stderr": 0.017593486895366828,
    "acc_norm": 0.25326797385620914,
    "acc_norm_stderr": 0.017593486895366828
  },
  "harness|ko_mmlu_professional_accounting|5": {
    "acc": 0.25886524822695034,
    "acc_stderr": 0.026129572527180848,
    "acc_norm": 0.25886524822695034,
    "acc_norm_stderr": 0.026129572527180848
  },
  "harness|ko_mmlu_machine_learning|5": {
    "acc": 0.2857142857142857,
    "acc_stderr": 0.042878587513404565,
    "acc_norm": 0.2857142857142857,
    "acc_norm_stderr": 0.042878587513404565
  },
  "harness|ko_mmlu_high_school_statistics|5": {
    "acc": 0.20833333333333334,
    "acc_stderr": 0.02769691071309394,
    "acc_norm": 0.20833333333333334,
    "acc_norm_stderr": 0.02769691071309394
  },
  "harness|ko_mmlu_moral_scenarios|5": {
    "acc": 0.2424581005586592,
    "acc_stderr": 0.01433352205921789,
    "acc_norm": 0.2424581005586592,
    "acc_norm_stderr": 0.01433352205921789
  },
  "harness|ko_mmlu_college_computer_science|5": {
    "acc": 0.23,
    "acc_stderr": 0.04229525846816507,
    "acc_norm": 0.23,
    "acc_norm_stderr": 0.04229525846816507
  },
  "harness|ko_mmlu_high_school_computer_science|5": {
    "acc": 0.28,
    "acc_stderr": 0.045126085985421276,
    "acc_norm": 0.28,
    "acc_norm_stderr": 0.045126085985421276
  },
  "harness|ko_mmlu_professional_medicine|5": {
    "acc": 0.31985294117647056,
    "acc_stderr": 0.028332959514031218,
    "acc_norm": 0.31985294117647056,
    "acc_norm_stderr": 0.028332959514031218
  },
  "harness|ko_mmlu_security_studies|5": {
    "acc": 0.2653061224489796,
    "acc_stderr": 0.02826388994378461,
    "acc_norm": 0.2653061224489796,
    "acc_norm_stderr": 0.02826388994378461
  },
  "harness|ko_mmlu_high_school_world_history|5": {
    "acc": 0.27848101265822783,
    "acc_stderr": 0.029178682304842544,
    "acc_norm": 0.27848101265822783,
    "acc_norm_stderr": 0.029178682304842544
  },
  "harness|ko_mmlu_professional_law|5": {
    "acc": 0.26401564537157757,
    "acc_stderr": 0.011258435537723814,
    "acc_norm": 0.26401564537157757,
    "acc_norm_stderr": 0.011258435537723814
  },
  "harness|ko_mmlu_high_school_us_history|5": {
    "acc": 0.24019607843137256,
    "acc_stderr": 0.02998373305591361,
    "acc_norm": 0.24019607843137256,
    "acc_norm_stderr": 0.02998373305591361
  },
  "harness|ko_mmlu_high_school_european_history|5": {
    "acc": 0.2545454545454545,
    "acc_stderr": 0.03401506715249039,
    "acc_norm": 0.2545454545454545,
    "acc_norm_stderr": 0.03401506715249039
  },
  "harness|ko_truthfulqa_mc|0": {
    "mc1": 0.2521419828641371,
    "mc1_stderr": 0.015201522246299953,
    "mc2": 0.4275383331125476,
    "mc2_stderr": 0.01526305656191646
  },
  "harness|ko_commongen_v2|2": {
    "acc": 0.14403778040141677,
    "acc_stderr": 0.012072030576668953,
    "acc_norm": 0.3707201889020071,
    "acc_norm_stderr": 0.016605801289212598
  }
} 
versions
	{
  "all": 0,
  "harness|ko_arc_challenge|25": 0,
  "harness|ko_hellaswag|10": 0,
  "harness|ko_mmlu_world_religions|5": 1,
  "harness|ko_mmlu_management|5": 1,
  "harness|ko_mmlu_miscellaneous|5": 1,
  "harness|ko_mmlu_anatomy|5": 1,
  "harness|ko_mmlu_abstract_algebra|5": 1,
  "harness|ko_mmlu_conceptual_physics|5": 1,
  "harness|ko_mmlu_virology|5": 1,
  "harness|ko_mmlu_philosophy|5": 1,
  "harness|ko_mmlu_human_aging|5": 1,
  "harness|ko_mmlu_human_sexuality|5": 1,
  "harness|ko_mmlu_medical_genetics|5": 1,
  "harness|ko_mmlu_high_school_geography|5": 1,
  "harness|ko_mmlu_electrical_engineering|5": 1,
  "harness|ko_mmlu_college_physics|5": 1,
  "harness|ko_mmlu_high_school_microeconomics|5": 1,
  "harness|ko_mmlu_high_school_macroeconomics|5": 1,
  "harness|ko_mmlu_computer_security|5": 1,
  "harness|ko_mmlu_global_facts|5": 1,
  "harness|ko_mmlu_jurisprudence|5": 1,
  "harness|ko_mmlu_high_school_chemistry|5": 1,
  "harness|ko_mmlu_high_school_biology|5": 1,
  "harness|ko_mmlu_marketing|5": 1,
  "harness|ko_mmlu_clinical_knowledge|5": 1,
  "harness|ko_mmlu_public_relations|5": 1,
  "harness|ko_mmlu_high_school_mathematics|5": 1,
  "harness|ko_mmlu_high_school_physics|5": 1,
  "harness|ko_mmlu_sociology|5": 1,
  "harness|ko_mmlu_college_medicine|5": 1,
  "harness|ko_mmlu_elementary_mathematics|5": 1,
  "harness|ko_mmlu_college_biology|5": 1,
  "harness|ko_mmlu_college_chemistry|5": 1,
  "harness|ko_mmlu_us_foreign_policy|5": 1,
  "harness|ko_mmlu_moral_disputes|5": 1,
  "harness|ko_mmlu_logical_fallacies|5": 1,
  "harness|ko_mmlu_prehistory|5": 1,
  "harness|ko_mmlu_college_mathematics|5": 1,
  "harness|ko_mmlu_high_school_government_and_politics|5": 1,
  "harness|ko_mmlu_econometrics|5": 1,
  "harness|ko_mmlu_high_school_psychology|5": 1,
  "harness|ko_mmlu_formal_logic|5": 1,
  "harness|ko_mmlu_nutrition|5": 1,
  "harness|ko_mmlu_business_ethics|5": 1,
  "harness|ko_mmlu_international_law|5": 1,
  "harness|ko_mmlu_astronomy|5": 1,
  "harness|ko_mmlu_professional_psychology|5": 1,
  "harness|ko_mmlu_professional_accounting|5": 1,
  "harness|ko_mmlu_machine_learning|5": 1,
  "harness|ko_mmlu_high_school_statistics|5": 1,
  "harness|ko_mmlu_moral_scenarios|5": 1,
  "harness|ko_mmlu_college_computer_science|5": 1,
  "harness|ko_mmlu_high_school_computer_science|5": 1,
  "harness|ko_mmlu_professional_medicine|5": 1,
  "harness|ko_mmlu_security_studies|5": 1,
  "harness|ko_mmlu_high_school_world_history|5": 1,
  "harness|ko_mmlu_professional_law|5": 1,
  "harness|ko_mmlu_high_school_us_history|5": 1,
  "harness|ko_mmlu_high_school_european_history|5": 1,
  "harness|ko_truthfulqa_mc|0": 0,
  "harness|ko_commongen_v2|2": 1
} 
 | 
	{
  "model_name": "beomi/llama-2-ko-7b-emb-dev",
  "model_sha": "d0e8d08d5f41082f3f48ec990edc2eb521ac2e73",
  "model_dtype": "torch.float16",
  "lighteval_sha": "",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null
} 
 | 
					
	{
  "harness|ko_arc_challenge|25": {
    "acc": 0.32593856655290104,
    "acc_stderr": 0.013697432466693246,
    "acc_norm": 0.38139931740614336,
    "acc_norm_stderr": 0.014194389086685272
  },
  "harness|ko_hellaswag|10": {
    "acc": 0.3543118900617407,
    "acc_stderr": 0.0047732675101127406,
    "acc_norm": 0.4435371439952201,
    "acc_norm_stderr": 0.004957863944093121
  },
  "harness|ko_mmlu_world_religions|5": {
    "acc": 0.45614035087719296,
    "acc_stderr": 0.03820042586602966,
    "acc_norm": 0.45614035087719296,
    "acc_norm_stderr": 0.03820042586602966
  },
  "harness|ko_mmlu_management|5": {
    "acc": 0.5339805825242718,
    "acc_stderr": 0.04939291447273481,
    "acc_norm": 0.5339805825242718,
    "acc_norm_stderr": 0.04939291447273481
  },
  "harness|ko_mmlu_miscellaneous|5": {
    "acc": 0.48020434227330777,
    "acc_stderr": 0.017865944827291615,
    "acc_norm": 0.48020434227330777,
    "acc_norm_stderr": 0.017865944827291615
  },
  "harness|ko_mmlu_anatomy|5": {
    "acc": 0.32592592592592595,
    "acc_stderr": 0.04049122041702506,
    "acc_norm": 0.32592592592592595,
    "acc_norm_stderr": 0.04049122041702506
  },
  "harness|ko_mmlu_abstract_algebra|5": {
    "acc": 0.27,
    "acc_stderr": 0.044619604333847415,
    "acc_norm": 0.27,
    "acc_norm_stderr": 0.044619604333847415
  },
  "harness|ko_mmlu_conceptual_physics|5": {
    "acc": 0.4297872340425532,
    "acc_stderr": 0.03236214467715564,
    "acc_norm": 0.4297872340425532,
    "acc_norm_stderr": 0.03236214467715564
  },
  "harness|ko_mmlu_virology|5": {
    "acc": 0.3795180722891566,
    "acc_stderr": 0.037777988227480165,
    "acc_norm": 0.3795180722891566,
    "acc_norm_stderr": 0.037777988227480165
  },
  "harness|ko_mmlu_philosophy|5": {
    "acc": 0.43729903536977494,
    "acc_stderr": 0.02817391776176287,
    "acc_norm": 0.43729903536977494,
    "acc_norm_stderr": 0.02817391776176287
  },
  "harness|ko_mmlu_human_aging|5": {
    "acc": 0.4170403587443946,
    "acc_stderr": 0.03309266936071721,
    "acc_norm": 0.4170403587443946,
    "acc_norm_stderr": 0.03309266936071721
  },
  "harness|ko_mmlu_human_sexuality|5": {
    "acc": 0.4198473282442748,
    "acc_stderr": 0.04328577215262972,
    "acc_norm": 0.4198473282442748,
    "acc_norm_stderr": 0.04328577215262972
  },
  "harness|ko_mmlu_medical_genetics|5": {
    "acc": 0.35,
    "acc_stderr": 0.0479372485441102,
    "acc_norm": 0.35,
    "acc_norm_stderr": 0.0479372485441102
  },
  "harness|ko_mmlu_high_school_geography|5": {
    "acc": 0.48484848484848486,
    "acc_stderr": 0.03560716516531061,
    "acc_norm": 0.48484848484848486,
    "acc_norm_stderr": 0.03560716516531061
  },
  "harness|ko_mmlu_electrical_engineering|5": {
    "acc": 0.35172413793103446,
    "acc_stderr": 0.03979236637497411,
    "acc_norm": 0.35172413793103446,
    "acc_norm_stderr": 0.03979236637497411
  },
  "harness|ko_mmlu_college_physics|5": {
    "acc": 0.23529411764705882,
    "acc_stderr": 0.04220773659171453,
    "acc_norm": 0.23529411764705882,
    "acc_norm_stderr": 0.04220773659171453
  },
  "harness|ko_mmlu_high_school_microeconomics|5": {
    "acc": 0.4327731092436975,
    "acc_stderr": 0.032183581077426124,
    "acc_norm": 0.4327731092436975,
    "acc_norm_stderr": 0.032183581077426124
  },
  "harness|ko_mmlu_high_school_macroeconomics|5": {
    "acc": 0.3923076923076923,
    "acc_stderr": 0.02475600038213094,
    "acc_norm": 0.3923076923076923,
    "acc_norm_stderr": 0.02475600038213094
  },
  "harness|ko_mmlu_computer_security|5": {
    "acc": 0.46,
    "acc_stderr": 0.05009082659620333,
    "acc_norm": 0.46,
    "acc_norm_stderr": 0.05009082659620333
  },
  "harness|ko_mmlu_global_facts|5": {
    "acc": 0.35,
    "acc_stderr": 0.047937248544110196,
    "acc_norm": 0.35,
    "acc_norm_stderr": 0.047937248544110196
  },
  "harness|ko_mmlu_jurisprudence|5": {
    "acc": 0.5370370370370371,
    "acc_stderr": 0.04820403072760628,
    "acc_norm": 0.5370370370370371,
    "acc_norm_stderr": 0.04820403072760628
  },
  "harness|ko_mmlu_high_school_chemistry|5": {
    "acc": 0.29064039408866993,
    "acc_stderr": 0.031947400722655395,
    "acc_norm": 0.29064039408866993,
    "acc_norm_stderr": 0.031947400722655395
  },
  "harness|ko_mmlu_high_school_biology|5": {
    "acc": 0.432258064516129,
    "acc_stderr": 0.028181739720019416,
    "acc_norm": 0.432258064516129,
    "acc_norm_stderr": 0.028181739720019416
  },
  "harness|ko_mmlu_marketing|5": {
    "acc": 0.6196581196581197,
    "acc_stderr": 0.03180425204384099,
    "acc_norm": 0.6196581196581197,
    "acc_norm_stderr": 0.03180425204384099
  },
  "harness|ko_mmlu_clinical_knowledge|5": {
    "acc": 0.4528301886792453,
    "acc_stderr": 0.030635627957961827,
    "acc_norm": 0.4528301886792453,
    "acc_norm_stderr": 0.030635627957961827
  },
  "harness|ko_mmlu_public_relations|5": {
    "acc": 0.5454545454545454,
    "acc_stderr": 0.04769300568972743,
    "acc_norm": 0.5454545454545454,
    "acc_norm_stderr": 0.04769300568972743
  },
  "harness|ko_mmlu_high_school_mathematics|5": {
    "acc": 0.2851851851851852,
    "acc_stderr": 0.027528599210340492,
    "acc_norm": 0.2851851851851852,
    "acc_norm_stderr": 0.027528599210340492
  },
  "harness|ko_mmlu_high_school_physics|5": {
    "acc": 0.2582781456953642,
    "acc_stderr": 0.035737053147634576,
    "acc_norm": 0.2582781456953642,
    "acc_norm_stderr": 0.035737053147634576
  },
  "harness|ko_mmlu_sociology|5": {
    "acc": 0.5472636815920398,
    "acc_stderr": 0.03519702717576915,
    "acc_norm": 0.5472636815920398,
    "acc_norm_stderr": 0.03519702717576915
  },
  "harness|ko_mmlu_college_medicine|5": {
    "acc": 0.36416184971098264,
    "acc_stderr": 0.03669072477416907,
    "acc_norm": 0.36416184971098264,
    "acc_norm_stderr": 0.03669072477416907
  },
  "harness|ko_mmlu_elementary_mathematics|5": {
    "acc": 0.2777777777777778,
    "acc_stderr": 0.02306818884826111,
    "acc_norm": 0.2777777777777778,
    "acc_norm_stderr": 0.02306818884826111
  },
  "harness|ko_mmlu_college_biology|5": {
    "acc": 0.3611111111111111,
    "acc_stderr": 0.040166600304512336,
    "acc_norm": 0.3611111111111111,
    "acc_norm_stderr": 0.040166600304512336
  },
  "harness|ko_mmlu_college_chemistry|5": {
    "acc": 0.28,
    "acc_stderr": 0.04512608598542129,
    "acc_norm": 0.28,
    "acc_norm_stderr": 0.04512608598542129
  },
  "harness|ko_mmlu_us_foreign_policy|5": {
    "acc": 0.55,
    "acc_stderr": 0.05,
    "acc_norm": 0.55,
    "acc_norm_stderr": 0.05
  },
  "harness|ko_mmlu_moral_disputes|5": {
    "acc": 0.5028901734104047,
    "acc_stderr": 0.02691864538323901,
    "acc_norm": 0.5028901734104047,
    "acc_norm_stderr": 0.02691864538323901
  },
  "harness|ko_mmlu_logical_fallacies|5": {
    "acc": 0.36809815950920244,
    "acc_stderr": 0.03789213935838396,
    "acc_norm": 0.36809815950920244,
    "acc_norm_stderr": 0.03789213935838396
  },
  "harness|ko_mmlu_prehistory|5": {
    "acc": 0.4660493827160494,
    "acc_stderr": 0.027756535257347663,
    "acc_norm": 0.4660493827160494,
    "acc_norm_stderr": 0.027756535257347663
  },
  "harness|ko_mmlu_college_mathematics|5": {
    "acc": 0.35,
    "acc_stderr": 0.0479372485441102,
    "acc_norm": 0.35,
    "acc_norm_stderr": 0.0479372485441102
  },
  "harness|ko_mmlu_high_school_government_and_politics|5": {
    "acc": 0.45595854922279794,
    "acc_stderr": 0.03594413711272438,
    "acc_norm": 0.45595854922279794,
    "acc_norm_stderr": 0.03594413711272438
  },
  "harness|ko_mmlu_econometrics|5": {
    "acc": 0.21929824561403508,
    "acc_stderr": 0.03892431106518753,
    "acc_norm": 0.21929824561403508,
    "acc_norm_stderr": 0.03892431106518753
  },
  "harness|ko_mmlu_high_school_psychology|5": {
    "acc": 0.44954128440366975,
    "acc_stderr": 0.021327881417823373,
    "acc_norm": 0.44954128440366975,
    "acc_norm_stderr": 0.021327881417823373
  },
  "harness|ko_mmlu_formal_logic|5": {
    "acc": 0.3253968253968254,
    "acc_stderr": 0.041905964388711366,
    "acc_norm": 0.3253968253968254,
    "acc_norm_stderr": 0.041905964388711366
  },
  "harness|ko_mmlu_nutrition|5": {
    "acc": 0.4117647058823529,
    "acc_stderr": 0.028180596328259287,
    "acc_norm": 0.4117647058823529,
    "acc_norm_stderr": 0.028180596328259287
  },
  "harness|ko_mmlu_business_ethics|5": {
    "acc": 0.48,
    "acc_stderr": 0.050211673156867795,
    "acc_norm": 0.48,
    "acc_norm_stderr": 0.050211673156867795
  },
  "harness|ko_mmlu_international_law|5": {
    "acc": 0.6363636363636364,
    "acc_stderr": 0.043913262867240704,
    "acc_norm": 0.6363636363636364,
    "acc_norm_stderr": 0.043913262867240704
  },
  "harness|ko_mmlu_astronomy|5": {
    "acc": 0.35526315789473684,
    "acc_stderr": 0.038947344870133176,
    "acc_norm": 0.35526315789473684,
    "acc_norm_stderr": 0.038947344870133176
  },
  "harness|ko_mmlu_professional_psychology|5": {
    "acc": 0.3349673202614379,
    "acc_stderr": 0.01909422816700031,
    "acc_norm": 0.3349673202614379,
    "acc_norm_stderr": 0.01909422816700031
  },
  "harness|ko_mmlu_professional_accounting|5": {
    "acc": 0.3333333333333333,
    "acc_stderr": 0.02812163604063989,
    "acc_norm": 0.3333333333333333,
    "acc_norm_stderr": 0.02812163604063989
  },
  "harness|ko_mmlu_machine_learning|5": {
    "acc": 0.2857142857142857,
    "acc_stderr": 0.04287858751340456,
    "acc_norm": 0.2857142857142857,
    "acc_norm_stderr": 0.04287858751340456
  },
  "harness|ko_mmlu_high_school_statistics|5": {
    "acc": 0.3333333333333333,
    "acc_stderr": 0.03214952147802747,
    "acc_norm": 0.3333333333333333,
    "acc_norm_stderr": 0.03214952147802747
  },
  "harness|ko_mmlu_moral_scenarios|5": {
    "acc": 0.25921787709497207,
    "acc_stderr": 0.014655780837497722,
    "acc_norm": 0.25921787709497207,
    "acc_norm_stderr": 0.014655780837497722
  },
  "harness|ko_mmlu_college_computer_science|5": {
    "acc": 0.3,
    "acc_stderr": 0.046056618647183814,
    "acc_norm": 0.3,
    "acc_norm_stderr": 0.046056618647183814
  },
  "harness|ko_mmlu_high_school_computer_science|5": {
    "acc": 0.39,
    "acc_stderr": 0.04902071300001975,
    "acc_norm": 0.39,
    "acc_norm_stderr": 0.04902071300001975
  },
  "harness|ko_mmlu_professional_medicine|5": {
    "acc": 0.34191176470588236,
    "acc_stderr": 0.02881472242225417,
    "acc_norm": 0.34191176470588236,
    "acc_norm_stderr": 0.02881472242225417
  },
  "harness|ko_mmlu_security_studies|5": {
    "acc": 0.4448979591836735,
    "acc_stderr": 0.031814251181977865,
    "acc_norm": 0.4448979591836735,
    "acc_norm_stderr": 0.031814251181977865
  },
  "harness|ko_mmlu_high_school_world_history|5": {
    "acc": 0.5822784810126582,
    "acc_stderr": 0.032103530322412685,
    "acc_norm": 0.5822784810126582,
    "acc_norm_stderr": 0.032103530322412685
  },
  "harness|ko_mmlu_professional_law|5": {
    "acc": 0.363754889178618,
    "acc_stderr": 0.012286991879902887,
    "acc_norm": 0.363754889178618,
    "acc_norm_stderr": 0.012286991879902887
  },
  "harness|ko_mmlu_high_school_us_history|5": {
    "acc": 0.5,
    "acc_stderr": 0.03509312031717982,
    "acc_norm": 0.5,
    "acc_norm_stderr": 0.03509312031717982
  },
  "harness|ko_mmlu_high_school_european_history|5": {
    "acc": 0.5151515151515151,
    "acc_stderr": 0.03902551007374448,
    "acc_norm": 0.5151515151515151,
    "acc_norm_stderr": 0.03902551007374448
  },
  "harness|ko_truthfulqa_mc|0": {
    "mc1": 0.2839657282741738,
    "mc1_stderr": 0.015785370858396708,
    "mc2": 0.444838685797901,
    "mc2_stderr": 0.015532530203119514
  },
  "harness|ko_commongen_v2|2": {
    "acc": 0.3742621015348288,
    "acc_stderr": 0.016637917789798735,
    "acc_norm": 0.4179456906729634,
    "acc_norm_stderr": 0.016957292005279723
  }
} 
 | 
	{
  "all": 0,
  "harness|ko_arc_challenge|25": 0,
  "harness|ko_hellaswag|10": 0,
  "harness|ko_mmlu_world_religions|5": 1,
  "harness|ko_mmlu_management|5": 1,
  "harness|ko_mmlu_miscellaneous|5": 1,
  "harness|ko_mmlu_anatomy|5": 1,
  "harness|ko_mmlu_abstract_algebra|5": 1,
  "harness|ko_mmlu_conceptual_physics|5": 1,
  "harness|ko_mmlu_virology|5": 1,
  "harness|ko_mmlu_philosophy|5": 1,
  "harness|ko_mmlu_human_aging|5": 1,
  "harness|ko_mmlu_human_sexuality|5": 1,
  "harness|ko_mmlu_medical_genetics|5": 1,
  "harness|ko_mmlu_high_school_geography|5": 1,
  "harness|ko_mmlu_electrical_engineering|5": 1,
  "harness|ko_mmlu_college_physics|5": 1,
  "harness|ko_mmlu_high_school_microeconomics|5": 1,
  "harness|ko_mmlu_high_school_macroeconomics|5": 1,
  "harness|ko_mmlu_computer_security|5": 1,
  "harness|ko_mmlu_global_facts|5": 1,
  "harness|ko_mmlu_jurisprudence|5": 1,
  "harness|ko_mmlu_high_school_chemistry|5": 1,
  "harness|ko_mmlu_high_school_biology|5": 1,
  "harness|ko_mmlu_marketing|5": 1,
  "harness|ko_mmlu_clinical_knowledge|5": 1,
  "harness|ko_mmlu_public_relations|5": 1,
  "harness|ko_mmlu_high_school_mathematics|5": 1,
  "harness|ko_mmlu_high_school_physics|5": 1,
  "harness|ko_mmlu_sociology|5": 1,
  "harness|ko_mmlu_college_medicine|5": 1,
  "harness|ko_mmlu_elementary_mathematics|5": 1,
  "harness|ko_mmlu_college_biology|5": 1,
  "harness|ko_mmlu_college_chemistry|5": 1,
  "harness|ko_mmlu_us_foreign_policy|5": 1,
  "harness|ko_mmlu_moral_disputes|5": 1,
  "harness|ko_mmlu_logical_fallacies|5": 1,
  "harness|ko_mmlu_prehistory|5": 1,
  "harness|ko_mmlu_college_mathematics|5": 1,
  "harness|ko_mmlu_high_school_government_and_politics|5": 1,
  "harness|ko_mmlu_econometrics|5": 1,
  "harness|ko_mmlu_high_school_psychology|5": 1,
  "harness|ko_mmlu_formal_logic|5": 1,
  "harness|ko_mmlu_nutrition|5": 1,
  "harness|ko_mmlu_business_ethics|5": 1,
  "harness|ko_mmlu_international_law|5": 1,
  "harness|ko_mmlu_astronomy|5": 1,
  "harness|ko_mmlu_professional_psychology|5": 1,
  "harness|ko_mmlu_professional_accounting|5": 1,
  "harness|ko_mmlu_machine_learning|5": 1,
  "harness|ko_mmlu_high_school_statistics|5": 1,
  "harness|ko_mmlu_moral_scenarios|5": 1,
  "harness|ko_mmlu_college_computer_science|5": 1,
  "harness|ko_mmlu_high_school_computer_science|5": 1,
  "harness|ko_mmlu_professional_medicine|5": 1,
  "harness|ko_mmlu_security_studies|5": 1,
  "harness|ko_mmlu_high_school_world_history|5": 1,
  "harness|ko_mmlu_professional_law|5": 1,
  "harness|ko_mmlu_high_school_us_history|5": 1,
  "harness|ko_mmlu_high_school_european_history|5": 1,
  "harness|ko_truthfulqa_mc|0": 0,
  "harness|ko_commongen_v2|2": 1
} 
 | 
	{
  "model_name": "jjourney1125/llama2-dev",
  "model_sha": "66931bf246639e144dcd1e8b255a2222e210e2f0",
  "model_dtype": "torch.float16",
  "lighteval_sha": "",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null
} 
 | 
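For reference, a minimal Python sketch of how the per-task scores in a results object of the shape shown above could be collapsed into a single average. This is an illustrative assumption, not part of the evaluation harness itself; the file name "results.json" is hypothetical and stands in for one of the per-model JSON objects in this table. MMLU/ARC/HellaSwag/CommonGen entries carry "acc_norm", while ko_truthfulqa_mc carries "mc2", so both are handled.

import json

# Hypothetical file holding one of the per-model result objects shown above.
with open("results.json") as f:
    results = json.load(f)

scores = []
for task, metrics in results.items():
    # Most tasks report acc_norm; TruthfulQA-style tasks report mc2 instead.
    if "acc_norm" in metrics:
        scores.append(metrics["acc_norm"])
    elif "mc2" in metrics:
        scores.append(metrics["mc2"])

print(f"mean score over {len(scores)} tasks: {sum(scores) / len(scores):.4f}")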
					