Wonder-Griffin committed on
Commit
01226b7
1 Parent(s): 989cbb5

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. config.json +1 -75
  2. generation_config.json +4 -0
  3. model.safetensors +2 -2
  4. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,75 +1 @@
1
- {
2
- "return_dict": true,
3
- "output_hidden_states": false,
4
- "output_attentions": false,
5
- "torchscript": false,
6
- "torch_dtype": "float32", // Explicitly set dtype for PyTorch
7
- "use_bfloat16": false,
8
- "tf_legacy_loss": false,
9
- "pruned_heads": {},
10
- "tie_word_embeddings": true,
11
- "chunk_size_feed_forward": 0,
12
- "is_encoder_decoder": false,
13
- "is_decoder": false,
14
- "cross_attention_hidden_size": null,
15
- "add_cross_attention": false,
16
- "tie_encoder_decoder": false,
17
- "max_length": 50, // Increased for more meaningful text generation
18
- "min_length": 10, // Ensuring a minimum length for generated text
19
- "do_sample": true, // Enable sampling for more varied text generation
20
- "early_stopping": false,
21
- "num_beams": 3, // Increase for more diverse text generation
22
- "num_beam_groups": 1,
23
- "diversity_penalty": 0.0,
24
- "temperature": 0.8, // Adjusted for more creative but still coherent outputs
25
- "top_k": 40, // Lowered to make the model more focused
26
- "top_p": 0.9, // Typical value for balanced creativity and coherence
27
- "typical_p": 1.0,
28
- "repetition_penalty": 1.2, // Added to reduce repetition
29
- "length_penalty": 1.0,
30
- "no_repeat_ngram_size": 2, // Prevents repeating the same n-grams
31
- "encoder_no_repeat_ngram_size": 0,
32
- "bad_words_ids": null,
33
- "num_return_sequences": 1,
34
- "output_scores": false,
35
- "return_dict_in_generate": false,
36
- "forced_bos_token_id": null,
37
- "forced_eos_token_id": null,
38
- "remove_invalid_values": false,
39
- "exponential_decay_length_penalty": null,
40
- "suppress_tokens": null,
41
- "begin_suppress_tokens": null,
42
- "architectures": ["GPT2"],
43
- "finetuning_task": null,
44
- "id2label": {
45
- "0": "LABEL_0",
46
- "1": "LABEL_1"
47
- },
48
- "label2id": {
49
- "LABEL_0": 0,
50
- "LABEL_1": 1
51
- },
52
- "tokenizer_class": "BertTokenizerFast",
53
- "prefix": null,
54
- "bos_token_id": null,
55
- "pad_token_id": null,
56
- "eos_token_id": null,
57
- "sep_token_id": null,
58
- "decoder_start_token_id": null,
59
- "task_specific_params": null,
60
- "problem_type": null,
61
- "_name_or_path": "Wonder-Griffin/Judge-GPT2",
62
- "transformers_version": "4.43.3",
63
- "vocab_size": 50257,
64
- "n_layer": 12,
65
- "n_head": 12,
66
- "n_embd": 768,
67
- "output_dir": "./results",
68
- "block_size": 512,
69
- "hidden_size": 768,
70
- "dropout": 0.1,
71
- "bias": true,
72
- "ff_expansion_factor": 4,
73
- "label_smoothing": 0.1,
74
- "model_type": "GPT2"
75
- }
 
1
+ {"output_dir": "C:/Users/wonde/output", "vocab_size": 50257, "block_size": 512, "n_layer": 12, "n_head": 12, "n_embd": 768, "hidden_size": 768, "dropout": 0.1, "bias": true, "ff_expansion_factor": 4, "label_smoothing": 0.1}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
generation_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "transformers_version": "4.42.4"
4
+ }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7e25c838c248db9fa3d4e79eae9bd1754f15678a6e422637376f063e08db85a3
3
- size 963697256
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6fc20e9db0e42a1ca83237f7570a105e05fb2aca500903d3925572cf61ccdb26
3
+ size 1118086856
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d1bd0d5c4770057f790120de38cce807e376900c50e430f3d0646f12e2f84149
3
  size 963788730
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:64f629d79549f38a5f8a356f07e9270bb4d8d5e9823f912bae0b5e942f6a348e
3
  size 963788730