Upload folder using huggingface_hub
- .gitattributes +22 -0
- .job_config.json +120 -0
- config.yaml +101 -0
- log.txt +0 -0
- optimizer.pt +3 -0
- samples/1754680704127__000000000_0.jpg +0 -0
- samples/1754680954383__000000000_1.jpg +3 -0
- samples/1754682645203__000000250_0.jpg +0 -0
- samples/1754682897160__000000250_1.jpg +3 -0
- samples/1754684603295__000000500_0.jpg +0 -0
- samples/1754684855471__000000500_1.jpg +3 -0
- samples/1754686558622__000000750_0.jpg +3 -0
- samples/1754686810772__000000750_1.jpg +3 -0
- samples/1754688514183__000001000_0.jpg +0 -0
- samples/1754688766311__000001000_1.jpg +3 -0
- samples/1754690468085__000001250_0.jpg +3 -0
- samples/1754690720285__000001250_1.jpg +3 -0
- samples/1754692422398__000001500_0.jpg +3 -0
- samples/1754692674400__000001500_1.jpg +3 -0
- samples/1754694377605__000001750_0.jpg +3 -0
- samples/1754694629787__000001750_1.jpg +3 -0
- samples/1754696333775__000002000_0.jpg +3 -0
- samples/1754696585958__000002000_1.jpg +3 -0
- samples/1754698293007__000002250_0.jpg +3 -0
- samples/1754698545223__000002250_1.jpg +3 -0
- samples/1754700253716__000002500_0.jpg +3 -0
- samples/1754700505906__000002500_1.jpg +3 -0
- samples/1754702213594__000002750_0.jpg +3 -0
- samples/1754702465712__000002750_1.jpg +3 -0
- samples/1754704167607__000003000_0.jpg +3 -0
- samples/1754704419868__000003000_1.jpg +3 -0
- skilgrimr_v1.safetensors +3 -0
- skilgrimr_v1_000002000.safetensors +3 -0
- skilgrimr_v1_000002250.safetensors +3 -0
- skilgrimr_v1_000002500.safetensors +3 -0
- skilgrimr_v1_000002750.safetensors +3 -0
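The commit title above refers to the `huggingface_hub` Python client. As a minimal sketch (the repo id and local output path are placeholders, not values recorded in this commit), an upload like this one is typically produced with `HfApi.upload_folder`:

```python
# Minimal sketch: pushing a training output folder to the Hub with huggingface_hub.
# The repo id and local path below are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # expects a token from `huggingface-cli login` or the HF_TOKEN env var
api.create_repo("your-username/skilgrimr_v1", repo_type="model", exist_ok=True)
api.upload_folder(
    folder_path="/workspace/ai-toolkit/output/skilgrimr_v1",  # local training output
    repo_id="your-username/skilgrimr_v1",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```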
.gitattributes
CHANGED
@@ -33,3 +33,25 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+samples/1754680954383__000000000_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754682897160__000000250_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754684855471__000000500_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754686558622__000000750_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754686810772__000000750_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754688766311__000001000_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754690468085__000001250_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754690720285__000001250_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754692422398__000001500_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754692674400__000001500_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754694377605__000001750_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754694629787__000001750_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754696333775__000002000_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754696585958__000002000_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754698293007__000002250_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754698545223__000002250_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754700253716__000002500_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754700505906__000002500_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754702213594__000002750_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754702465712__000002750_1.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754704167607__000003000_0.jpg filter=lfs diff=lfs merge=lfs -text
+samples/1754704419868__000003000_1.jpg filter=lfs diff=lfs merge=lfs -text
.job_config.json
ADDED
@@ -0,0 +1,120 @@
{
  "job": "extension",
  "config": {
    "name": "skilgrimr_v1",
    "process": [
      {
        "type": "ui_trainer",
        "training_folder": "/workspace/ai-toolkit/output",
        "sqlite_db_path": "/workspace/ai-toolkit/aitk_db.db",
        "device": "cuda",
        "trigger_word": "skilgrimr",
        "performance_log_every": 10,
        "network": {
          "type": "lora",
          "linear": 32,
          "linear_alpha": 32,
          "conv": 16,
          "conv_alpha": 16,
          "lokr_full_rank": true,
          "lokr_factor": -1,
          "network_kwargs": {
            "ignore_if_contains": []
          }
        },
        "save": {
          "dtype": "bf16",
          "save_every": 250,
          "max_step_saves_to_keep": 4,
          "save_format": "diffusers",
          "push_to_hub": false
        },
        "datasets": [
          {
            "folder_path": "/workspace/ai-toolkit/datasets/skilgrimr",
            "control_path": null,
            "mask_path": null,
            "mask_min_value": 0.1,
            "default_caption": "",
            "caption_ext": "txt",
            "caption_dropout_rate": 0.05,
            "cache_latents_to_disk": false,
            "is_reg": false,
            "network_weight": 1,
            "resolution": [
              1024
            ],
            "controls": [],
            "shrink_video_to_frames": true,
            "num_frames": 1,
            "do_i2v": true
          }
        ],
        "train": {
          "batch_size": 1,
          "bypass_guidance_embedding": false,
          "steps": 3000,
          "gradient_accumulation": 1,
          "train_unet": true,
          "train_text_encoder": false,
          "gradient_checkpointing": true,
          "noise_scheduler": "flowmatch",
          "optimizer": "adamw8bit",
          "timestep_type": "weighted",
          "content_or_style": "balanced",
          "optimizer_params": {
            "weight_decay": 0.0001
          },
          "unload_text_encoder": false,
          "cache_text_embeddings": false,
          "lr": 0.0001,
          "ema_config": {
            "use_ema": false,
            "ema_decay": 0.99
          },
          "skip_first_sample": false,
          "disable_sampling": false,
          "dtype": "bf16",
          "diff_output_preservation": false,
          "diff_output_preservation_multiplier": 1,
          "diff_output_preservation_class": "person"
        },
        "model": {
          "name_or_path": "Qwen/Qwen-Image",
          "quantize": true,
          "qtype": "qfloat8",
          "quantize_te": true,
          "qtype_te": "qfloat8",
          "arch": "qwen_image",
          "low_vram": true,
          "model_kwargs": {}
        },
        "sample": {
          "sampler": "flowmatch",
          "sample_every": 250,
          "width": 1328,
          "height": 1328,
          "samples": [
            {
              "prompt": "portrait of skilgrimr, neutral gray seamless backdrop, 85mm lens look, soft key + gentle fill, mouth closed, slight smile, sharp eyes, realistic skin texture, minimal shadows, photographic"
            },
            {
              "prompt": "rain-soaked night street, close-up portrait of skilgrimr, neon reflections and teal–magenta bokeh, shallow depth of field, wet hair detail on skilgrimr, cinematic cyberpunk mood, specular highlights on skilgrimr’s skin, 35mm lens look, no hood, no high collar Ask ChatGPT"
            }
          ],
          "neg": "",
          "seed": 42,
          "walk_seed": true,
          "guidance_scale": 4,
          "sample_steps": 50,
          "num_frames": 1,
          "fps": 1
        }
      }
    ]
  },
  "meta": {
    "name": "[name]",
    "version": "1.0"
  }
}
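As a small convenience sketch, assuming `.job_config.json` has been downloaded locally, the key training settings can be read back with the standard `json` module:

```python
# Sketch: reading the uploaded .job_config.json and echoing the main hyperparameters.
# Only the file name comes from this commit; nothing is assumed about how ai-toolkit
# consumes the config internally.
import json

with open(".job_config.json") as f:
    job = json.load(f)

proc = job["config"]["process"][0]
print("trigger word :", proc["trigger_word"])            # skilgrimr
print("base model   :", proc["model"]["name_or_path"])   # Qwen/Qwen-Image
print("LoRA rank    :", proc["network"]["linear"])       # 32
print("learning rate:", proc["train"]["lr"])             # 1e-4
print("total steps  :", proc["train"]["steps"])          # 3000
print("save every   :", proc["save"]["save_every"])      # 250
```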
config.yaml
ADDED
@@ -0,0 +1,101 @@
job: extension
config:
  name: skilgrimr_v1
  process:
  - type: ui_trainer
    training_folder: /workspace/ai-toolkit/output
    sqlite_db_path: /workspace/ai-toolkit/aitk_db.db
    device: cuda
    trigger_word: skilgrimr
    performance_log_every: 10
    network:
      type: lora
      linear: 32
      linear_alpha: 32
      conv: 16
      conv_alpha: 16
      lokr_full_rank: true
      lokr_factor: -1
      network_kwargs:
        ignore_if_contains: []
    save:
      dtype: bf16
      save_every: 250
      max_step_saves_to_keep: 4
      save_format: diffusers
      push_to_hub: false
    datasets:
    - folder_path: /workspace/ai-toolkit/datasets/skilgrimr
      control_path: null
      mask_path: null
      mask_min_value: 0.1
      default_caption: ''
      caption_ext: txt
      caption_dropout_rate: 0.05
      cache_latents_to_disk: false
      is_reg: false
      network_weight: 1
      resolution:
      - 1024
      controls: []
      shrink_video_to_frames: true
      num_frames: 1
      do_i2v: true
    train:
      batch_size: 1
      bypass_guidance_embedding: false
      steps: 3000
      gradient_accumulation: 1
      train_unet: true
      train_text_encoder: false
      gradient_checkpointing: true
      noise_scheduler: flowmatch
      optimizer: adamw8bit
      timestep_type: weighted
      content_or_style: balanced
      optimizer_params:
        weight_decay: 0.0001
      unload_text_encoder: false
      cache_text_embeddings: false
      lr: 0.0001
      ema_config:
        use_ema: false
        ema_decay: 0.99
      skip_first_sample: false
      disable_sampling: false
      dtype: bf16
      diff_output_preservation: false
      diff_output_preservation_multiplier: 1
      diff_output_preservation_class: person
    model:
      name_or_path: Qwen/Qwen-Image
      quantize: true
      qtype: qfloat8
      quantize_te: true
      qtype_te: qfloat8
      arch: qwen_image
      low_vram: true
      model_kwargs: {}
    sample:
      sampler: flowmatch
      sample_every: 250
      width: 1328
      height: 1328
      samples:
      - prompt: portrait of skilgrimr, neutral gray seamless backdrop, 85mm lens look,
          soft key + gentle fill, mouth closed, slight smile, sharp eyes, realistic
          skin texture, minimal shadows, photographic
      - prompt: "rain-soaked night street, close-up portrait of skilgrimr, neon reflections\
          \ and teal\u2013magenta bokeh, shallow depth of field, wet hair detail on\
          \ skilgrimr, cinematic cyberpunk mood, specular highlights on skilgrimr\u2019\
          s skin, 35mm lens look, no hood, no high collar Ask ChatGPT"
      neg: ''
      seed: 42
      walk_seed: true
      guidance_scale: 4
      sample_steps: 50
      num_frames: 1
      fps: 1
meta:
  name: skilgrimr_v1
  version: '1.0'
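Since the same run is described twice (once as `.job_config.json`, once as `config.yaml`), a quick sanity check that the two files agree can look like this sketch (PyYAML is assumed to be installed; nothing here is part of ai-toolkit itself):

```python
# Sketch: checking that config.yaml and .job_config.json describe the same run.
# Both files were uploaded in this commit; PyYAML is an assumed dependency.
import json
import yaml

with open(".job_config.json") as f:
    job = json.load(f)["config"]["process"][0]
with open("config.yaml") as f:
    cfg = yaml.safe_load(f)["config"]["process"][0]

for section in ("network", "train", "model"):
    same = job[section] == cfg[section]
    print(f"{section:8s} sections match: {same}")
```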
log.txt
ADDED
The diff for this file is too large to render.
optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d9a45b13aa0751258bf376558b97f34be48fb1103fbdc0bac72fc6e277ad44fd
size 598613389
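The three lines above are a Git LFS pointer, not the optimizer state itself. A sketch for verifying that a resolved download matches the pointer's `oid` and `size` (the expected values are copied from the pointer above):

```python
# Sketch: verifying a downloaded binary against its Git LFS pointer (sha256 oid + size).
import hashlib
from pathlib import Path

EXPECTED_OID = "d9a45b13aa0751258bf376558b97f34be48fb1103fbdc0bac72fc6e277ad44fd"
EXPECTED_SIZE = 598613389

path = Path("optimizer.pt")  # the resolved binary, not the 3-line pointer file
digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("optimizer.pt matches its LFS pointer")
```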
samples/1754680704127__000000000_0.jpg
ADDED (image preview omitted)
samples/1754680954383__000000000_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754682645203__000000250_0.jpg
ADDED (image preview omitted)
samples/1754682897160__000000250_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754684603295__000000500_0.jpg
ADDED (image preview omitted)
samples/1754684855471__000000500_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754686558622__000000750_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754686810772__000000750_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754688514183__000001000_0.jpg
ADDED (image preview omitted)
samples/1754688766311__000001000_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754690468085__000001250_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754690720285__000001250_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754692422398__000001500_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754692674400__000001500_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754694377605__000001750_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754694629787__000001750_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754696333775__000002000_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754696585958__000002000_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754698293007__000002250_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754698545223__000002250_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754700253716__000002500_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754700505906__000002500_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754702213594__000002750_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754702465712__000002750_1.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754704167607__000003000_0.jpg
ADDED (image preview omitted; stored via Git LFS)
samples/1754704419868__000003000_1.jpg
ADDED (image preview omitted; stored via Git LFS)
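To fetch only these sample grids rather than the full repository, `snapshot_download` with `allow_patterns` works as in this sketch (the repo id is a placeholder, since the target repository is not named in this commit):

```python
# Sketch: downloading just the samples/*.jpg files from the Hub.
# The repo id is a placeholder, not taken from this commit.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="your-username/skilgrimr_v1",
    allow_patterns=["samples/*.jpg"],
)
print("samples downloaded to:", local_dir)
```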
skilgrimr_v1.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:91e3fb4512700a9a68a407236bd1c780314896141470f1b85c49648cebe15c4d
size 590058880
skilgrimr_v1_000002000.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8ea4e8046cc008813acdcec206d526b44ccea365b865d12dcabdce4d135c3d38
size 590058880
skilgrimr_v1_000002250.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7b7fc2e10e20799ca43fcc42f219a8b48cd33c3420ef7f15052de326d5b5904b
size 590058880
skilgrimr_v1_000002500.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:35023fe9cc594f6ab0b17bde8b58040b6d68aea1d6b7e7f9765ebae6fb79c656
size 590058880
skilgrimr_v1_000002750.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0ca9803cb8061cc42cba8d10fb682d3cde9778ae5bb25a7832720fb790c496ac
size 590058880
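Each `.safetensors` entry above is likewise an LFS pointer to a LoRA checkpoint. A sketch for downloading and inspecting the final checkpoint (the repo id is again a placeholder; `huggingface_hub`, `safetensors`, and `torch` are assumed to be installed):

```python
# Sketch: downloading the final LoRA checkpoint and listing a few of its tensors.
# The repo id is a placeholder, not taken from this commit.
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

path = hf_hub_download("your-username/skilgrimr_v1", "skilgrimr_v1.safetensors")
state = load_file(path)  # dict[str, torch.Tensor] of LoRA weights

print(f"{len(state)} tensors in checkpoint")
for name, tensor in list(state.items())[:5]:
    print(name, tuple(tensor.shape), tensor.dtype)
```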