{
    "module": "keras_nlp.src.models.gemma.gemma_causal_lm",
    "class_name": "GemmaCausalLM",
    "config": {
        "backbone": {
            "module": "keras_nlp.src.models.gemma.gemma_backbone",
            "class_name": "GemmaBackbone",
            "config": {
                "name": "gemma_backbone",
                "trainable": true,
                "vocabulary_size": 256000,
                "num_layers": 26,
                "num_query_heads": 8,
                "num_key_value_heads": 4,
                "hidden_dim": 2304,
                "intermediate_dim": 18432,
                "head_dim": 256,
                "layer_norm_epsilon": 1e-06,
                "dropout": 0,
                "query_head_dim_normalize": true,
                "use_post_ffw_norm": true,
                "use_post_attention_norm": true,
                "final_logit_soft_cap": 30.0,
                "attention_logit_soft_cap": 50.0,
                "sliding_window_size": 4096,
                "use_sliding_window_attention": true
            },
            "registered_name": "keras_nlp>GemmaBackbone"
        },
        "preprocessor": {
            "module": "keras_nlp.src.models.gemma.gemma_causal_lm_preprocessor",
            "class_name": "GemmaCausalLMPreprocessor",
            "config": {
                "name": "gemma_causal_lm_preprocessor_1",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "float32"
                    },
                    "registered_name": null
                },
                "tokenizer": {
                    "module": "keras_nlp.src.models.gemma.gemma_tokenizer",
                    "class_name": "GemmaTokenizer",
                    "config": {
                        "name": "gemma_tokenizer",
                        "trainable": true,
                        "dtype": {
                            "module": "keras",
                            "class_name": "DTypePolicy",
                            "config": {
                                "name": "int32"
                            },
                            "registered_name": null
                        },
                        "proto": null,
                        "sequence_length": null,
                        "add_bos": false,
                        "add_eos": false
                    },
                    "registered_name": "keras_nlp>GemmaTokenizer"
                },
                "sequence_length": 1024,
                "add_start_token": true,
                "add_end_token": true
            },
            "registered_name": "keras_nlp>GemmaCausalLMPreprocessor"
        },
        "name": "gemma_causal_lm_1"
    },
    "registered_name": "keras_nlp>GemmaCausalLM"
}
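The JSON above is the standard Keras serialization format ("module" / "class_name" / "config" / "registered_name"), so the object tree can be rebuilt with `keras.saving.deserialize_keras_object` once `keras_nlp` is imported and its classes are registered. A minimal sketch follows, assuming Keras 3 with keras_nlp installed and that the config has been saved to a local file named config.json (a hypothetical path); note the config describes architecture only, so weights and the tokenizer's SentencePiece proto would still have to be loaded separately.

import json

import keras
import keras_nlp  # noqa: F401  # importing registers the "keras_nlp>..." serializable classes

# Hypothetical location of the serialized config shown above.
with open("config.json") as f:
    config = json.load(f)

# Rebuild the (untrained) GemmaCausalLM object tree from its serialized config.
model = keras.saving.deserialize_keras_object(config)

# Spot-check a few of the hyperparameters from the config.
print(model.backbone.num_layers)           # 26
print(model.backbone.hidden_dim)           # 2304
print(model.preprocessor.sequence_length)  # 1024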