Upload folder using huggingface_hub
Browse files
- config.json +2 -2
- generation_config.json +1 -1
config.json
CHANGED
|
@@ -41,7 +41,7 @@
|
|
| 41 |
}
|
| 42 |
}
|
| 43 |
},
|
| 44 |
-
"format": "
|
| 45 |
"global_compression_ratio": 1.246153835878366,
|
| 46 |
"ignore": [
|
| 47 |
"lm_head"
|
|
@@ -172,7 +172,7 @@
|
|
| 172 |
"sliding_window": 262144,
|
| 173 |
"tie_word_embeddings": false,
|
| 174 |
"torch_dtype": "float16",
|
| 175 |
-
"transformers_version": "4.44.0
|
| 176 |
"use_cache": true,
|
| 177 |
"vocab_size": 32064
|
| 178 |
}
|
|
|
|
| 41 |
}
|
| 42 |
}
|
| 43 |
},
|
| 44 |
+
"format": "float-quantized",
|
| 45 |
"global_compression_ratio": 1.246153835878366,
|
| 46 |
"ignore": [
|
| 47 |
"lm_head"
|
|
|
|
| 172 |
"sliding_window": 262144,
|
| 173 |
"tie_word_embeddings": false,
|
| 174 |
"torch_dtype": "float16",
|
| 175 |
+
"transformers_version": "4.44.0",
|
| 176 |
"use_cache": true,
|
| 177 |
"vocab_size": 32064
|
| 178 |
}
|
generation_config.json
CHANGED
|
@@ -7,5 +7,5 @@
|
|
| 7 |
32007
|
| 8 |
],
|
| 9 |
"pad_token_id": 32000,
|
| 10 |
-
"transformers_version": "4.44.0
|
| 11 |
}
|
|
|
|
| 7 |
32007
|
| 8 |
],
|
| 9 |
"pad_token_id": 32000,
|
| 10 |
+
"transformers_version": "4.44.0"
|
| 11 |
}
|