theekshana committed: Upload tokenizer
special_tokens_map.json  CHANGED  (+1 -7)
@@ -108,13 +108,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
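The change collapses the pad_token entry from an AddedToken-style object to a bare string. A minimal sketch of how the resulting map is consumed, assuming the uploaded tokenizer files sit in a local directory (the path below is a placeholder, not part of this commit):

from transformers import AutoTokenizer

# Placeholder path: point this at the directory holding the uploaded
# tokenizer files (special_tokens_map.json, tokenizer config, etc.).
tokenizer = AutoTokenizer.from_pretrained("./tokenizer")

# Whether pad_token is declared as a plain string or as a full
# AddedToken object, it resolves to the same "</s>" token; the object
# form only carries extra flags such as lstrip/rstrip/normalized.
print(tokenizer.pad_token)     # "</s>"
print(tokenizer.pad_token_id)  # vocabulary id of "</s>"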