{
  "init_args": [
    {
      "attention_probs_dropout_prob": 0.1,
      "hidden_act": "gelu",
      "hidden_dropout_prob": 0.1,
      "hidden_size": 768,
      "initializer_range": 0.02,
      "max_position_embeddings": 2048,
      "num_attention_heads": 12,
      "num_hidden_layers": 12,
      "task_type_vocab_size": 3,
      "type_vocab_size": 4,
      "use_task_id": true,
      "vocab_size": 40000,
      "init_class": "ErnieModel"
    }
  ],
  "init_class": "UIE"
}