{ "module": "keras_nlp.src.models.phi3.phi3_causal_lm", "class_name": "Phi3CausalLM", "config": { "backbone": { "module": "keras_nlp.src.models.phi3.phi3_backbone", "class_name": "Phi3Backbone", "config": { "name": "phi3_backbone_1", "trainable": true, "vocabulary_size": 32064, "num_layers": 32, "num_query_heads": 32, "hidden_dim": 3072, "intermediate_dim": 8192, "num_key_value_heads": 32, "layer_norm_epsilon": 1e-05, "dropout": 0.0, "max_sequence_length": 4096, "pretraining_sequence_length": 4096, "rope_max_wavelength": 10000.0, "rope_scaling_type": null, "rope_scaling_short_factor": null, "rope_scaling_long_factor": null }, "registered_name": "keras_nlp>Phi3Backbone" }, "preprocessor": { "module": "keras_nlp.src.models.phi3.phi3_causal_lm_preprocessor", "class_name": "Phi3CausalLMPreprocessor", "config": { "name": "phi3_causal_lm_preprocessor", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "mixed_bfloat16" }, "registered_name": null }, "tokenizer": { "module": "keras_nlp.src.models.phi3.phi3_tokenizer", "class_name": "Phi3Tokenizer", "config": { "name": "phi3_tokenizer", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "int32" }, "registered_name": null }, "proto": null, "sequence_length": null }, "registered_name": "keras_nlp>Phi3Tokenizer" }, "sequence_length": 1024, "add_start_token": true, "add_end_token": false }, "registered_name": "keras_nlp>Phi3CausalLMPreprocessor" }, "name": "phi3_causal_lm" }, "registered_name": "keras_nlp>Phi3CausalLM" }