psidharth567 committed
Commit 015d942 · verified · Parent: defe16c

Updated config: removed "cache_implementation" and changed "use_cache" from true to false. These settings were interfering with some versions of transformers.

Files changed (1):
config.json (+1, -2)
config.json CHANGED
@@ -12,7 +12,6 @@
   "attention_dropout": 0.0,
   "attn_logit_softcapping": null,
   "bos_token_id": 2,
-  "cache_implementation": "hybrid",
   "classifier_dropout_prob": 0.0,
   "eos_token_id": 1,
   "final_logit_softcapping": null,
@@ -102,7 +101,7 @@
   "sliding_window_pattern": 6,
   "torch_dtype": "float32",
   "transformers_version": "4.51.3",
-  "use_cache": true,
+  "use_cache": false,
   "use_non_causal_attention": true,
   "vocab_size": 262146
 }
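
For snapshots that predate this commit, the same overrides can also be applied at load time instead of editing config.json. A minimal sketch, assuming a Python environment with transformers installed; the repo id below is a placeholder for illustration, not necessarily this model's actual path:

from transformers import AutoConfig, AutoModel

# Placeholder repo id; substitute the real model path.
repo_id = "psidharth567/model"

# Mirror this commit's change: disable the KV cache and clear the
# hybrid cache-implementation hint before instantiating the model.
config = AutoConfig.from_pretrained(repo_id)
config.use_cache = False
if hasattr(config, "cache_implementation"):
    config.cache_implementation = None

model = AutoModel.from_pretrained(repo_id, config=config)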