tikanosa committed
Commit 3510634 · verified · Parent: e52ffca

Upload folder using huggingface_hub

Files changed (3):
  1. README.md +0 -0
  2. model.safetensors +1 -1
  3. tokenizer_config.json +0 -7
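
The commit message says the folder was pushed with huggingface_hub's upload_folder. A minimal sketch of that call is below; the repository id is a placeholder, since the repo name is not shown on this page.

```python
from huggingface_hub import upload_folder

# Placeholder repo id; substitute the actual <user>/<repo> this commit belongs to.
upload_folder(
    repo_id="tikanosa/<repo-name>",
    folder_path="./export",  # local folder holding README.md, model.safetensors, tokenizer_config.json, ...
    commit_message="Upload folder using huggingface_hub",
)
```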
README.md CHANGED
The diff for this file is too large to render. See raw diff
 
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e1ac80013c430d1209ad9843e8c62f6cba7873ab2c9970c41ad2f3d0aa7acb19
+ oid sha256:4ff8ebcf33513ca4d19e999c262cf6847f4d9cd5131fb7b96be52b27df354aa0
  size 90866412
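
The oid in a Git LFS pointer is the SHA-256 digest of the stored file, so a local copy of model.safetensors can be checked against the updated pointer. A minimal verification sketch, assuming the file has already been downloaded to the working directory:

```python
import hashlib

# Values taken from the new LFS pointer above.
EXPECTED_SHA256 = "4ff8ebcf33513ca4d19e999c262cf6847f4d9cd5131fb7b96be52b27df354aa0"
EXPECTED_SIZE = 90866412

sha = hashlib.sha256()
size = 0
with open("model.safetensors", "rb") as f:  # assumed local path
    # Hash in 1 MiB chunks so the ~90 MB file never sits fully in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"unexpected size: {size}"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 does not match the LFS pointer"
print("model.safetensors matches the updated pointer")
```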
tokenizer_config.json CHANGED
@@ -47,19 +47,12 @@
   "do_lower_case": true,
   "extra_special_tokens": {},
   "mask_token": "[MASK]",
-  "max_length": 512,
   "model_max_length": 512,
   "never_split": null,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "[SEP]",
-  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
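
The seven keys removed here (max_length, pad_to_multiple_of, pad_token_type_id, padding_side, stride, truncation_side, truncation_strategy) appear to be call-time padding/truncation arguments that had been serialized into the config; the special tokens and model_max_length are unchanged. A small check of the cleaned file, assuming a local copy of tokenizer_config.json:

```python
import json

# The keys this commit removes, per the diff above.
REMOVED_KEYS = {
    "max_length",
    "pad_to_multiple_of",
    "pad_token_type_id",
    "padding_side",
    "stride",
    "truncation_side",
    "truncation_strategy",
}

with open("tokenizer_config.json", encoding="utf-8") as f:
    config = json.load(f)

leftovers = REMOVED_KEYS & config.keys()
print("leftover removed keys:", sorted(leftovers) or "none")
print("model_max_length:", config.get("model_max_length"))
print("tokenizer_class:", config.get("tokenizer_class"))
```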