AdamOswald1 committed on
Commit
988de64
·
1 Parent(s): 2a05724

Create config.json (#17)

Browse files

- Create config.json (51f432cfeab5a37d39830d5e400b5e5e54f9e4a8)

Files changed (1) hide show
  1. safety_checker/config.json +10 -2
safety_checker/config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "_name_or_path": "CompVis/stable-diffusion-safety-checker",
3
  "architectures": [
4
  "StableDiffusionSafetyChecker"
@@ -13,6 +14,7 @@
13
  "architectures": null,
14
  "attention_dropout": 0.0,
15
  "bad_words_ids": null,
 
16
  "bos_token_id": 0,
17
  "chunk_size_feed_forward": 0,
18
  "cross_attention_hidden_size": null,
@@ -66,8 +68,10 @@
66
  "return_dict": true,
67
  "return_dict_in_generate": false,
68
  "sep_token_id": null,
 
69
  "task_specific_params": null,
70
  "temperature": 1.0,
 
71
  "tie_encoder_decoder": false,
72
  "tie_word_embeddings": true,
73
  "tokenizer_class": null,
@@ -75,7 +79,7 @@
75
  "top_p": 1.0,
76
  "torch_dtype": null,
77
  "torchscript": false,
78
- "transformers_version": "4.19.2",
79
  "typical_p": 1.0,
80
  "use_bfloat16": false,
81
  "vocab_size": 49408
@@ -94,6 +98,7 @@
94
  "architectures": null,
95
  "attention_dropout": 0.0,
96
  "bad_words_ids": null,
 
97
  "bos_token_id": null,
98
  "chunk_size_feed_forward": 0,
99
  "cross_attention_hidden_size": null,
@@ -133,6 +138,7 @@
133
  "num_attention_heads": 16,
134
  "num_beam_groups": 1,
135
  "num_beams": 1,
 
136
  "num_hidden_layers": 24,
137
  "num_return_sequences": 1,
138
  "output_attentions": false,
@@ -148,8 +154,10 @@
148
  "return_dict": true,
149
  "return_dict_in_generate": false,
150
  "sep_token_id": null,
 
151
  "task_specific_params": null,
152
  "temperature": 1.0,
 
153
  "tie_encoder_decoder": false,
154
  "tie_word_embeddings": true,
155
  "tokenizer_class": null,
@@ -157,7 +165,7 @@
157
  "top_p": 1.0,
158
  "torch_dtype": null,
159
  "torchscript": false,
160
- "transformers_version": "4.19.2",
161
  "typical_p": 1.0,
162
  "use_bfloat16": false
163
  },
 
1
  {
2
+ "_commit_hash": "4bb648a606ef040e7685bde262611766a5fdd67b",
3
  "_name_or_path": "CompVis/stable-diffusion-safety-checker",
4
  "architectures": [
5
  "StableDiffusionSafetyChecker"
 
14
  "architectures": null,
15
  "attention_dropout": 0.0,
16
  "bad_words_ids": null,
17
+ "begin_suppress_tokens": null,
18
  "bos_token_id": 0,
19
  "chunk_size_feed_forward": 0,
20
  "cross_attention_hidden_size": null,
 
68
  "return_dict": true,
69
  "return_dict_in_generate": false,
70
  "sep_token_id": null,
71
+ "suppress_tokens": null,
72
  "task_specific_params": null,
73
  "temperature": 1.0,
74
+ "tf_legacy_loss": false,
75
  "tie_encoder_decoder": false,
76
  "tie_word_embeddings": true,
77
  "tokenizer_class": null,
 
79
  "top_p": 1.0,
80
  "torch_dtype": null,
81
  "torchscript": false,
82
+ "transformers_version": "4.23.1",
83
  "typical_p": 1.0,
84
  "use_bfloat16": false,
85
  "vocab_size": 49408
 
98
  "architectures": null,
99
  "attention_dropout": 0.0,
100
  "bad_words_ids": null,
101
+ "begin_suppress_tokens": null,
102
  "bos_token_id": null,
103
  "chunk_size_feed_forward": 0,
104
  "cross_attention_hidden_size": null,
 
138
  "num_attention_heads": 16,
139
  "num_beam_groups": 1,
140
  "num_beams": 1,
141
+ "num_channels": 3,
142
  "num_hidden_layers": 24,
143
  "num_return_sequences": 1,
144
  "output_attentions": false,
 
154
  "return_dict": true,
155
  "return_dict_in_generate": false,
156
  "sep_token_id": null,
157
+ "suppress_tokens": null,
158
  "task_specific_params": null,
159
  "temperature": 1.0,
160
+ "tf_legacy_loss": false,
161
  "tie_encoder_decoder": false,
162
  "tie_word_embeddings": true,
163
  "tokenizer_class": null,
 
165
  "top_p": 1.0,
166
  "torch_dtype": null,
167
  "torchscript": false,
168
+ "transformers_version": "4.23.1",
169
  "typical_p": 1.0,
170
  "use_bfloat16": false
171
  },