ramu0e committed
Commit a049e0e · verified · 1 Parent(s): 6293286

Upload pipeline epoch130 global_step160000

model_index.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "_class_name": "DiTPipeline",
+   "_diffusers_version": "0.35.2",
+   "scheduler": [
+     "diffusers",
+     "FlowMatchEulerDiscreteScheduler"
+   ],
+   "transformer": [
+     "minisora.models.modeling_dit",
+     "DiTModel"
+   ]
+ }
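
model_index.json wires the pipeline together: each component maps to a (module, class) pair, with the scheduler coming from diffusers and the transformer from the external minisora package. A minimal loading sketch, assuming minisora is installed so the custom class can be imported and that this repo is cloned locally (the path below is hypothetical, not from this commit):

    # Loading sketch, not documented usage for this repo. DiffusionPipeline reads
    # model_index.json, resolves FlowMatchEulerDiscreteScheduler from diffusers and
    # DiTModel from minisora.models.modeling_dit, then assembles the pipeline.
    from diffusers import DiffusionPipeline

    pipe = DiffusionPipeline.from_pretrained("./minisora-checkpoint")  # hypothetical local clone
    print(type(pipe).__name__)  # expected: DiTPipeline, per "_class_name"
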
scheduler/scheduler_config.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "_class_name": "FlowMatchEulerDiscreteScheduler",
+   "_diffusers_version": "0.35.2",
+   "base_image_seq_len": 256,
+   "base_shift": 0.5,
+   "invert_sigmas": false,
+   "max_image_seq_len": 4096,
+   "max_shift": 1.15,
+   "num_train_timesteps": 1000,
+   "shift": 1.0,
+   "shift_terminal": null,
+   "stochastic_sampling": false,
+   "time_shift_type": "exponential",
+   "use_beta_sigmas": false,
+   "use_dynamic_shifting": false,
+   "use_exponential_sigmas": false,
+   "use_karras_sigmas": false
+ }
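
The fields above are plain FlowMatchEulerDiscreteScheduler arguments; with shift = 1.0 and use_dynamic_shifting = false the sigma schedule is left unshifted. A small sketch recreating the scheduler from these values (nothing here is specific to this checkpoint):

    # Sketch: rebuild the scheduler from scheduler_config.json and look at the
    # discretized timesteps it would produce for a 50-step sampling run.
    from diffusers import FlowMatchEulerDiscreteScheduler

    scheduler = FlowMatchEulerDiscreteScheduler(
        num_train_timesteps=1000,
        shift=1.0,                   # 1.0 = identity, no timestep shifting
        use_dynamic_shifting=False,  # so set_timesteps() needs no mu argument
    )
    scheduler.set_timesteps(num_inference_steps=50)
    print(scheduler.timesteps[:3])   # first few of the 50 timesteps
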
transformer/config.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "_class_name": "DiTModel",
+   "_diffusers_version": "0.35.2",
+   "attention_head_dim": 64,
+   "attn_implementation": "flash_attention_2",
+   "eps": 1e-06,
+   "ffn_dim": 1536,
+   "freq_dim": 256,
+   "in_channels": 3,
+   "num_attention_heads": 6,
+   "num_layers": 12,
+   "out_channels": 3,
+   "patch_size": [
+     2,
+     4,
+     4
+   ],
+   "pos_embed_seq_len": null,
+   "rope_max_seq_len": 1024
+ }
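
This config describes a small pixel-space video DiT: the model width is num_attention_heads × attention_head_dim = 6 × 64 = 384, in_channels = 3 means it consumes RGB frames directly (consistent with model_index.json listing no VAE), and patch_size [2, 4, 4] groups 2 frames × 4 × 4 pixels into one token. A back-of-the-envelope sketch of what that implies (the input shape is a made-up example; the real DiTModel class lives in minisora and is not touched here):

    # Token arithmetic implied by transformer/config.json; 16x64x64 is a
    # hypothetical clip shape, not a documented training resolution.
    hidden_size = 6 * 64                 # num_attention_heads * attention_head_dim -> 384
    patch_t, patch_h, patch_w = 2, 4, 4  # "patch_size"
    frames, height, width = 16, 64, 64   # hypothetical (T, H, W) input clip

    tokens = (frames // patch_t) * (height // patch_h) * (width // patch_w)
    print(hidden_size, tokens)           # 384, 2048
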
transformer/diffusion_pytorch_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db4651012bf4d70097e647ef2b4cffa8aa7419d0f3d0edfccd392078c58bfaec
+ size 44997672
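
The .safetensors entry is a Git LFS pointer, not the weights themselves; `git lfs pull` fetches the actual 44,997,672-byte (~45 MB) payload. A quick inspection sketch once the file is resolved (requires the safetensors package and a local clone; the path mirrors this repo's layout):

    # Sketch: list tensor names in the resolved checkpoint. This fails on the
    # raw LFS pointer above, so run `git lfs pull` first.
    from safetensors import safe_open

    path = "transformer/diffusion_pytorch_model.safetensors"
    with safe_open(path, framework="pt") as f:
        names = list(f.keys())
    print(len(names), names[:3])  # tensor count and a few example names
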