Upload tokenizer
Files changed:
- added_tokens.json +3 -0
- tokenizer.model +3 -0
- tokenizer_config.json +1 -2
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+{
+  "<image_soft_token>": 262144
+}
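A minimal sketch of how to check the extra token registered by added_tokens.json, assuming the files are uploaded to a Hugging Face repo; "your-org/your-gemma3-checkpoint" is a placeholder, not the actual repo id.

# Sketch: the token added in added_tokens.json should resolve to id 262144.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-gemma3-checkpoint")  # placeholder repo id
image_token_id = tok.convert_tokens_to_ids("<image_soft_token>")
print(image_token_id)  # expected: 262144, per added_tokens.json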
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c
+size 4689074
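The file committed here is a Git LFS pointer, not the SentencePiece model itself; the pointer records the sha256 (oid) and byte size of the real blob. A minimal sketch for verifying a locally downloaded tokenizer.model against that pointer, assuming the file sits in the current directory after `git lfs pull` or a hub download:

# Sketch: compare a local tokenizer.model with the LFS pointer's oid and size.
import hashlib, os

path = "tokenizer.model"  # assumed local path
expected_oid = "1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c"
expected_size = 4689074

digest = hashlib.sha256(open(path, "rb").read()).hexdigest()
print(digest == expected_oid, os.path.getsize(path) == expected_size)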
tokenizer_config.json CHANGED
@@ -51337,11 +51337,10 @@
   "image_token": "<image_soft_token>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<eos>",
-  "padding_side": "left",
   "processor_class": "Gemma3Processor",
   "sp_model_kwargs": null,
   "spaces_between_special_tokens": false,
-  "tokenizer_class": "
+  "tokenizer_class": "GemmaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
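After this change, tokenizer_config.json names "GemmaTokenizer" explicitly and no longer pins padding_side, so padding behavior falls back to the tokenizer class defaults. A minimal sketch of loading with these settings, again using a placeholder repo id:

# Sketch: load the slow tokenizer class named in the updated config.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-gemma3-checkpoint", use_fast=False)  # placeholder repo id
print(type(tok).__name__)  # expected: GemmaTokenizer
print(tok.padding_side)    # class default now, since the config no longer sets it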