{
  "_name_or_path": "checkpoints-pos/checkpoint-3052",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "finetuning_task": "ner",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "B-A",
    "1": "B-Ab",
    "2": "B-B",
    "3": "B-C",
    "4": "B-CH",
    "5": "B-Cb",
    "6": "B-Cc",
    "7": "B-E",
    "8": "B-Eb",
    "9": "B-I",
    "10": "B-L",
    "11": "B-M",
    "12": "B-Mb",
    "13": "B-N",
    "14": "B-Nb",
    "15": "B-Nc",
    "16": "B-Ni",
    "17": "B-Np",
    "18": "B-Nu",
    "19": "B-Ny",
    "20": "B-P",
    "21": "B-Pb",
    "22": "B-R",
    "23": "B-T",
    "24": "B-V",
    "25": "B-Vb",
    "26": "B-Vy",
    "27": "B-X",
    "28": "B-Xy",
    "29": "B-Y",
    "30": "B-Z",
    "31": "I-A",
    "32": "I-B",
    "33": "I-C",
    "34": "I-CH",
    "35": "I-Cc",
    "36": "I-E",
    "37": "I-I",
    "38": "I-L",
    "39": "I-M",
    "40": "I-N",
    "41": "I-Nb",
    "42": "I-Nc",
    "43": "I-Np",
    "44": "I-Nu",
    "45": "I-Ny",
    "46": "I-P",
    "47": "I-R",
    "48": "I-T",
    "49": "I-V",
    "50": "I-Vb",
    "51": "I-X",
    "52": "I-Y",
    "53": "I-Z"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-A": 0,
    "B-Ab": 1,
    "B-B": 2,
    "B-C": 3,
    "B-CH": 4,
    "B-Cb": 5,
    "B-Cc": 6,
    "B-E": 7,
    "B-Eb": 8,
    "B-I": 9,
    "B-L": 10,
    "B-M": 11,
    "B-Mb": 12,
    "B-N": 13,
    "B-Nb": 14,
    "B-Nc": 15,
    "B-Ni": 16,
    "B-Np": 17,
    "B-Nu": 18,
    "B-Ny": 19,
    "B-P": 20,
    "B-Pb": 21,
    "B-R": 22,
    "B-T": 23,
    "B-V": 24,
    "B-Vb": 25,
    "B-Vy": 26,
    "B-X": 27,
    "B-Xy": 28,
    "B-Y": 29,
    "B-Z": 30,
    "I-A": 31,
    "I-B": 32,
    "I-C": 33,
    "I-CH": 34,
    "I-Cc": 35,
    "I-E": 36,
    "I-I": 37,
    "I-L": 38,
    "I-M": 39,
    "I-N": 40,
    "I-Nb": 41,
    "I-Nc": 42,
    "I-Np": 43,
    "I-Nu": 44,
    "I-Ny": 45,
    "I-P": 46,
    "I-R": 47,
    "I-T": 48,
    "I-V": 49,
    "I-Vb": 50,
    "I-X": 51,
    "I-Y": 52,
    "I-Z": 53
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.45.1",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 15004
}
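
For context, a minimal sketch of using a checkpoint with this config for tagging; it assumes the local directory named in `_name_or_path` ("checkpoints-pos/checkpoint-3052") exists and that the example sentence is Vietnamese, since the BIO tagset above (`B-N`, `B-Np`, `B-V`, ...) matches common Vietnamese POS labels. The pipeline reads `id2label`/`label2id` from this file to map the 54 classifier outputs back to tags.

```python
# A minimal sketch, assuming the fine-tuned checkpoint from this config
# is available at the path recorded in "_name_or_path".
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_dir = "checkpoints-pos/checkpoint-3052"  # path taken from the config above

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# "simple" aggregation merges consecutive B-/I- subword predictions of the
# same tag into one span, using the id2label mapping defined in config.json.
tagger = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

# Hypothetical Vietnamese input; replace with your own text.
print(tagger("Hà Nội là thủ đô của Việt Nam ."))
```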