dacorvo (HF Staff) committed
Commit 7650164 · verified · 1 parent: 7e6e7ae

Synchronizing local compiler cache.

Files changed (43)
  1. .gitattributes +12 -0
  2. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/granite/ibm-granite/granite-3.1-2b-instruct/e3a25f6080ce21ddbea5.json +59 -0
  3. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/llama/unsloth/Llama-3.2-1B-Instruct/9468666db72cf8e90bee.json +64 -0
  4. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/phi3/microsoft/Phi-3-mini-4k-instruct/c76fd6924e0386c670aa.json +63 -0
  5. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/smollm3/HuggingFaceTB/SmolLM3-3B/c8931c218bbf1b28c5bd.json +135 -0
  6. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev2/llama/meta-llama/Meta-Llama-3.1-8B-Instruct/f52d9910d1c9d97af723.json +63 -0
  7. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev2/llama/unsloth/Llama-3.2-1B-Instruct/6b58992358ceb302871f.json +64 -0
  8. neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/compile_flags.json +1 -0
  9. neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.done +0 -0
  10. neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.hlo_module.pb +3 -0
  11. neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.neff +3 -0
  12. neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/compile_flags.json +1 -0
  13. neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.done +0 -0
  14. neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.hlo_module.pb +3 -0
  15. neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.neff +3 -0
  16. neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/wrapped_neff.hlo +3 -0
  17. neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/compile_flags.json +1 -0
  18. neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.done +0 -0
  19. neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.hlo_module.pb +3 -0
  20. neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.neff +3 -0
  21. neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/wrapped_neff.hlo +3 -0
  22. neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/compile_flags.json +1 -0
  23. neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.done +0 -0
  24. neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.hlo_module.pb +3 -0
  25. neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.neff +3 -0
  26. neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/compile_flags.json +1 -0
  27. neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.done +0 -0
  28. neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.hlo_module.pb +3 -0
  29. neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.neff +3 -0
  30. neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/compile_flags.json +1 -0
  31. neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.done +0 -0
  32. neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.hlo_module.pb +3 -0
  33. neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.neff +3 -0
  34. neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/wrapped_neff.hlo +3 -0
  35. neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/compile_flags.json +1 -0
  36. neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.done +0 -0
  37. neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.hlo_module.pb +3 -0
  38. neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.neff +3 -0
  39. neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/wrapped_neff.hlo +3 -0
  40. neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/compile_flags.json +1 -0
  41. neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.done +0 -0
  42. neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.hlo_module.pb +3 -0
  43. neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.neff +3 -0
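Each MODULE_<hash> directory listed above holds one compiled artifact set: its neuronx-cc flags (compile_flags.json), the serialized HLO module (model.hlo_module.pb), the compiled NEFF (model.neff) and, for some modules, a wrapped_neff.hlo. As a rough sketch only (not part of this commit), a consumer could pull a single module from the Hub with huggingface_hub; the repository id below is a placeholder, and repo_type may need to be set depending on where the cache is hosted.

# Hypothetical sketch: download one compiled module from a Neuron cache repo like this one.
from huggingface_hub import snapshot_download

CACHE_REPO_ID = "my-org/my-neuron-cache"  # placeholder, not taken from this commit
MODULE_DIR = "neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607"

local_path = snapshot_download(
    repo_id=CACHE_REPO_ID,
    allow_patterns=[f"{MODULE_DIR}/*"],  # compile_flags.json, model.hlo_module.pb, model.neff
)
print("module cached locally under:", local_path)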
.gitattributes CHANGED
@@ -14778,3 +14778,15 @@ neuronxcc-2.21.33363.0+82129205/MODULE_fd476e174a39b55eeb18+a02c3a36/model.neff
  neuronxcc-2.21.33363.0+82129205/MODULE_fd476e174a39b55eeb18+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.33363.0+82129205/MODULE_fff86602d73f8f8ecb89+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.33363.0+82129205/MODULE_fff86602d73f8f8ecb89+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/granite/ibm-granite/granite-3.1-2b-instruct/e3a25f6080ce21ddbea5.json ADDED
@@ -0,0 +1,59 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "ibm-granite/granite-3.1-2b-instruct",
  "_task": "text-generation",
  "architectures": [
    "GraniteForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.1,
  "attention_multiplier": 0.015625,
  "dtype": "bfloat16",
  "embedding_multiplier": 12.0,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "logits_scaling": 8.0,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "granite",
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "ibm-granite/granite-3.1-2b-instruct",
    "checkpoint_revision": "bbc2aed595bd38bd770263dc3ab831db9794441d",
    "continuous_batching": true,
    "ep_degree": 1,
    "fused_qkv": true,
    "glu_mlp": true,
    "local_ranks_size": 2,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.33363.0+82129205",
    "on_device_sampling": false,
    "optimum_neuron_version": "0.4.4.dev1",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "sequence_parallel_enabled": false,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 2
  },
  "num_attention_heads": 32,
  "num_hidden_layers": 40,
  "num_key_value_heads": 8,
  "residual_multiplier": 0.22,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 5000000.0,
  "tie_word_embeddings": true,
  "use_cache": true,
  "vocab_size": 49155
}
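The registry entry above pairs the model's Transformers config with, under "neuron", the serialized NxDNeuronConfig the artifacts were compiled for (batch size, sequence length, tensor-parallel degree, compiler and optimum-neuron versions). A minimal sketch, assuming the entry has been downloaded locally, of reading it and listing the fields a cache lookup would have to match:

# Illustrative only: report the compilation parameters recorded in a registry entry.
import json

entry_path = (
    "neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/granite/"
    "ibm-granite/granite-3.1-2b-instruct/e3a25f6080ce21ddbea5.json"
)
with open(entry_path) as f:
    entry = json.load(f)

neuron = entry["neuron"]  # serialized NxDNeuronConfig
print(entry["_model_id"], "->", entry["_task"])
for key in ("batch_size", "sequence_length", "tp_degree", "target",
            "neuronxcc_version", "optimum_neuron_version"):
    print(f"  {key} = {neuron[key]}")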
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/llama/unsloth/Llama-3.2-1B-Instruct/9468666db72cf8e90bee.json ADDED
@@ -0,0 +1,64 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "unsloth/Llama-3.2-1B-Instruct",
  "_task": "text-generation",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "dtype": "bfloat16",
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
    "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
    "continuous_batching": true,
    "ep_degree": 1,
    "fused_qkv": true,
    "glu_mlp": true,
    "local_ranks_size": 2,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.33363.0+82129205",
    "on_device_sampling": false,
    "optimum_neuron_version": "0.4.4.dev1",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "sequence_parallel_enabled": false,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 2
  },
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "unsloth_fixed": true,
  "use_cache": true,
  "vocab_size": 128256
}
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/phi3/microsoft/Phi-3-mini-4k-instruct/c76fd6924e0386c670aa.json ADDED
@@ -0,0 +1,63 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "microsoft/Phi-3-mini-4k-instruct",
  "_task": "text-generation",
  "architectures": [
    "Phi3ForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_phi3.Phi3Config",
    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
  },
  "dtype": "bfloat16",
  "embd_pdrop": 0.0,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 4096,
  "model_type": "phi3",
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "microsoft/Phi-3-mini-4k-instruct",
    "checkpoint_revision": "f39ac1d28e925b323eae81227eaba4464caced4e",
    "continuous_batching": true,
    "ep_degree": 1,
    "fused_qkv": true,
    "glu_mlp": true,
    "local_ranks_size": 2,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.33363.0+82129205",
    "on_device_sampling": false,
    "optimum_neuron_version": "0.4.4.dev1",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "sequence_parallel_enabled": false,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 2
  },
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "original_max_position_embeddings": 4096,
  "partial_rotary_factor": 1.0,
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "sliding_window": 2047,
  "tie_word_embeddings": false,
  "use_cache": true,
  "vocab_size": 32064
}
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev1/smollm3/HuggingFaceTB/SmolLM3-3B/c8931c218bbf1b28c5bd.json ADDED
@@ -0,0 +1,135 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "HuggingFaceTB/SmolLM3-3B",
  "_task": "text-generation",
  "architectures": [
    "SmolLM3ForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "dtype": "bfloat16",
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "layer_types": [
    "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention",
    "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention",
    "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention",
    "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention",
    "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention",
    "full_attention", "full_attention", "full_attention", "full_attention", "full_attention", "full_attention"
  ],
  "max_position_embeddings": 65536,
  "max_window_layers": 28,
  "mlp_bias": false,
  "model_type": "smollm3",
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "HuggingFaceTB/SmolLM3-3B",
    "checkpoint_revision": "a07cc9a04f16550a088caea529712d1d335b0ac1",
    "continuous_batching": true,
    "ep_degree": 1,
    "fused_qkv": true,
    "glu_mlp": true,
    "local_ranks_size": 2,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.33363.0+82129205",
    "on_device_sampling": false,
    "optimum_neuron_version": "0.4.4.dev1",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "sequence_parallel_enabled": false,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 2
  },
  "no_rope_layer_interval": 4,
  "no_rope_layers": [1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0],
  "num_attention_heads": 16,
  "num_hidden_layers": 36,
  "num_key_value_heads": 4,
  "pretraining_tp": 2,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 5000000.0,
  "sliding_window": null,
  "use_cache": false,
  "use_sliding_window": false,
  "vocab_size": 128256
}
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev2/llama/meta-llama/Meta-Llama-3.1-8B-Instruct/f52d9910d1c9d97af723.json ADDED
@@ -0,0 +1,63 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Meta-Llama-3.1-8B-Instruct",
  "_task": "text-generation",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "dtype": "bfloat16",
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 32,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B-Instruct",
    "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659",
    "continuous_batching": true,
    "ep_degree": 1,
    "fused_qkv": true,
    "glu_mlp": true,
    "local_ranks_size": 8,
    "max_batch_size": 32,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.33363.0+82129205",
    "on_device_sampling": false,
    "optimum_neuron_version": "0.4.4.dev2",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "sequence_parallel_enabled": false,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 8
  },
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 8.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "vocab_size": 128256
}
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.4.dev2/llama/unsloth/Llama-3.2-1B-Instruct/6b58992358ceb302871f.json ADDED
@@ -0,0 +1,64 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "unsloth/Llama-3.2-1B-Instruct",
  "_task": "text-generation",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "dtype": "bfloat16",
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
    "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
    "continuous_batching": true,
    "ep_degree": 1,
    "fused_qkv": true,
    "glu_mlp": true,
    "local_ranks_size": 2,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.33363.0+82129205",
    "on_device_sampling": false,
    "optimum_neuron_version": "0.4.4.dev2",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "sequence_parallel_enabled": false,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 2
  },
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "unsloth_fixed": true,
  "use_cache": true,
  "vocab_size": 128256
}
neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e345881b258c1681ba0f32fff9a68af5ef601037728aa658e43902ca1367f6b5
size 469824
neuronxcc-2.21.33363.0+82129205/MODULE_07b31a4647d204c9e0b6+24129607/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cc11433bb9fc0decfbaac5eba6dd51d6fd0d0a08302227d0c5773cb63650840b
size 41544704
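The model.hlo_module.pb, model.neff and wrapped_neff.hlo payloads are tracked with Git LFS, so the repository itself stores only small pointer files (version, oid, size) like those above. A sketch, assuming a checkout where the pointers have not been resolved with git lfs pull, of reading one pointer to report the real artifact size:

# Sketch: parse a Git LFS pointer file (lines of "key value") and report the artifact size.
from pathlib import Path

def read_lfs_pointer(path):
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines() if line)
    return fields["oid"], int(fields["size"])

# Illustrative local path to a pointer file from this module.
oid, size = read_lfs_pointer("MODULE_07b31a4647d204c9e0b6+24129607/model.neff")
print(f"NEFF {oid} is {size / 1e6:.1f} MB")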
neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:95b52bb25f4cffe3256b978fbd1d28251b62ad5e5e6567f8a72c988c0773db6e
size 1115300
neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:36abdd1924a09a40c8881de6092aa997d0c3383d5aa3c4961ff3306b1e0a8eae
size 6749184
neuronxcc-2.21.33363.0+82129205/MODULE_23c252d2e1c627086de5+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5b8d20fc6e85238b52c8d9c7ddec28f505a1b493e3fb7589dc65e4741195f0a5
size 6933139
neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1a38afe6d7755b3854989b78c8e6144ea5f0496c35eb945a2f554f05305383bb
size 387883
neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:70030484220f658f48d6402e159c5eb4690dd4262a3d1907296d0eb6ee63547b
size 2939904
neuronxcc-2.21.33363.0+82129205/MODULE_3e0552e84c0922533e51+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4701a18744940774dd6529471e1e0cbdbbec2071eaf9d636590ff4c25c029c6f
size 3013890
neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:48376071a4903c04e14ec5d1733b426d324003c9392d46b6793b79af6d98b028
size 884130
neuronxcc-2.21.33363.0+82129205/MODULE_46af0f795cf30d05b36e+24129607/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0aa72846d1bff0674ceee812f7a8f743061a1bd918e4d219651b1046c1747662
size 5192704
neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ae0f9a9a81f29f76c3d97f16e4f250edae4aee263216b144fd949b8080264604
size 1084587
neuronxcc-2.21.33363.0+82129205/MODULE_6760fbbd001e66ff21db+24129607/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:23b91765afe49c8e6462f678b6acc3be2400a38357af326604210328a5a20293
size 5807104
neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b08660f19cf8f59f4a1901ebf6f82e2b9e7681805ab114cc76ca4fe48fb37746
size 832979
neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:47c80b5bbe3687fcbeb43051790f19ac8d29f7f850397ba25f08e0e17b2d2ba8
size 6605824
neuronxcc-2.21.33363.0+82129205/MODULE_aa1a56237e3ad1974672+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5e63fd4d850071fa0c6533f3131bdac8bee508de2f6a1821183f07bb15955e54
size 6772233
neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c36c3ae2befa20992b87f7eb3a8b50f9c5560b478477c0301a689a66f3fd53be
size 661369
neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e0450789df9d43606b7e9b514575afe9c978dea0fa49bc4a17345e6e39a02512
size 12401664
neuronxcc-2.21.33363.0+82129205/MODULE_c3ed26835961cb74a993+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:07f500e6454dee5d5fb44419de47072e3cd3e5014f9d3f225515a5f8b8ee1782
size 12548565
neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4cf3533da5767894877c9d74d4264efc97a93ef15f7254359399d3e56600101a
size 1021532
neuronxcc-2.21.33363.0+82129205/MODULE_e31397d074de214d8505+24129607/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e2e04324b65c1b9bdfe725ea67091ac79e160a504b10ea43d3af075e8b8e2437
size 9360384