{
  "module": "keras_nlp.src.models.gemma.gemma_causal_lm_preprocessor",
  "class_name": "GemmaCausalLMPreprocessor",
  "config": {
    "name": "gemma_causal_lm_preprocessor",
    "trainable": true,
    "dtype": "float32",
    "tokenizer": {
      "module": "keras_nlp.src.models.gemma.gemma_tokenizer",
      "class_name": "GemmaTokenizer",
      "config": {
        "name": "gemma_tokenizer",
        "trainable": true,
        "dtype": "int32",
        "proto": null,
        "sequence_length": null
      },
      "registered_name": "keras_nlp>GemmaTokenizer"
    },
    "sequence_length": 8192,
    "add_start_token": true,
    "add_end_token": true
  },
  "registered_name": "keras_nlp>GemmaCausalLMPreprocessor"
}