NAYU01 committed on
Commit
e8c1aac
·
verified ·
1 Parent(s): bb9c648

Model save

Browse files
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- base_model: google/gemma-3-12b-pt
3
  library_name: transformers
4
  model_name: gemma-product-description
5
  tags:
@@ -11,7 +11,7 @@ licence: license
11
 
12
  # Model Card for gemma-product-description
13
 
14
- This model is a fine-tuned version of [google/gemma-3-12b-pt](https://huggingface.co/google/gemma-3-12b-pt).
15
  It has been trained using [TRL](https://github.com/huggingface/trl).
16
 
17
  ## Quick start
 
1
  ---
2
+ base_model: google/gemma-3-4b-pt
3
  library_name: transformers
4
  model_name: gemma-product-description
5
  tags:
 
11
 
12
  # Model Card for gemma-product-description
13
 
14
+ This model is a fine-tuned version of [google/gemma-3-4b-pt](https://huggingface.co/google/gemma-3-4b-pt).
15
  It has been trained using [TRL](https://github.com/huggingface/trl).
16
 
17
  ## Quick start
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
- "base_model_name_or_path": "google/gemma-3-12b-pt",
5
  "bias": "none",
6
  "eva_config": null,
7
  "exclude_modules": null,
@@ -27,16 +27,16 @@
27
  "revision": null,
28
  "target_modules": [
29
  "q_proj",
30
- "lm_head",
 
31
  "fc2",
32
  "v_proj",
33
- "up_proj",
34
- "down_proj",
35
- "fc1",
36
  "o_proj",
37
- "gate_proj",
38
  "out_proj",
39
- "k_proj"
 
 
 
40
  ],
41
  "task_type": "CAUSAL_LM",
42
  "use_dora": false,
 
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
+ "base_model_name_or_path": "google/gemma-3-4b-pt",
5
  "bias": "none",
6
  "eva_config": null,
7
  "exclude_modules": null,
 
27
  "revision": null,
28
  "target_modules": [
29
  "q_proj",
30
+ "fc1",
31
+ "up_proj",
32
  "fc2",
33
  "v_proj",
 
 
 
34
  "o_proj",
 
35
  "out_proj",
36
+ "down_proj",
37
+ "k_proj",
38
+ "gate_proj",
39
+ "lm_head"
40
  ],
41
  "task_type": "CAUSAL_LM",
42
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d5fce00670c3aef594b2c234ba787321fa3f499a34d199692d46070def142064
3
- size 4324331232
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c977a82f4741ab648107ba93f8c5e82bc7302855d93347a3c69066e2af3fb251
3
+ size 2839124552
runs/May26_17-36-00_527cf256990d/events.out.tfevents.1748280962.527cf256990d.4021.0 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a885dddeba4fa48025ca7dad7bcf6a5ff3f1e224f5f454a8c25765a2c9d2fe99
3
- size 8128
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:63c011b02b49c5b8fee45b8d4263172d55a6372223746d50c1748ebc91fc99c5
3
+ size 8746
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:88ffb638b88bf0c8c9cdcd4091523039b48b17d30bc119d249e0f5d468a942e3
3
  size 5624
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a7c42357b01231f5edff7a1b34dda90471873552bd1cf82b592704588af1e42e
3
  size 5624