MightyOctopus committed (verified)
Commit b819a08 · 1 Parent(s): d1faa94

Upload tokenizer

special_tokens_map.json CHANGED
@@ -21,11 +21,5 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<|im_end|>"
 }
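
This hunk replaces the AddedToken-style pad_token object in special_tokens_map.json with the plain string "<|im_end|>". A minimal sketch of how the new value could be checked from a local checkout of this commit (the working directory containing the committed files is assumed):

import json

# Read the updated special tokens map from a local checkout of this commit.
with open("special_tokens_map.json") as f:
    special_tokens = json.load(f)

# After this commit the pad token is a bare string rather than a dict of token flags.
print(special_tokens["pad_token"])  # expected: <|im_end|>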
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0a018134fd4ea264da6ed1c1b00e4b36533af34a96780e2333ed9f6779a215f7
-size 11422932
+oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+size 11422654
tokenizer_config.json CHANGED
@@ -232,7 +232,7 @@
   "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 131072,
-  "pad_token": "<|endoftext|>",
+  "pad_token": "<|im_end|>",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null