{
  "architectures": ["GPT2LMHeadModel"],
  "model_type": "gpt2",
  "vocab_size": 10000,
  "n_embd": 256,
  "n_layer": 2,
  "n_positions": 128,
  "n_inner": 1024,
  "n_head": 4,
  "activation_function": "relu",
  "bos_token_id": null,
  "eos_token_id": null,
  "pad_token_id": 0,
  "unk_token_id": 1,
  "mask_token_id": 2,
  "torch_dtype": "float32"
}