```json
{
  "architectures": [
    "MambaForHF"
  ],
  "bos_token_id": 1,
  "d_discr": 256,
  "d_input": 2048,
  "d_model": 4096,
  "d_state": 1024,
  "eos_token_id": 2,
  "ker_size": 4,
  "max_position_embeddings": 131072,
  "num_layers": 64,
  "pad_token_id": 0,
  "parallel": true,
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "vocab_size": 128256
}
```
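A minimal sketch of working with this config, assuming the JSON above is saved locally as `config.json`. The repo id in the commented-out `transformers` lines is hypothetical; `MambaForHF` is a custom architecture, so loading it through `transformers` would require the model repo to ship its own modeling code.

```python
import json

# Load the config file shown above (assumed to be saved as "config.json").
with open("config.json") as f:
    cfg = json.load(f)

# Sanity-check a few fields against the values in the file.
assert cfg["architectures"] == ["MambaForHF"]
assert cfg["d_model"] == 4096 and cfg["num_layers"] == 64
assert cfg["vocab_size"] == 128256

print(f"layers={cfg['num_layers']}, d_model={cfg['d_model']}, "
      f"d_state={cfg['d_state']}, max_ctx={cfg['max_position_embeddings']}")

# If this config lives in a Hub repo with custom modeling code, it could be
# loaded via AutoConfig. trust_remote_code=True is required because
# "MambaForHF" is not a built-in transformers class. The repo id below is
# a placeholder, not a real model:
# from transformers import AutoConfig
# config = AutoConfig.from_pretrained("user/mamba-model", trust_remote_code=True)
```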