{
  "init_args": [
    {
      "attention_probs_dropout_prob": 0.1,
      "hidden_act": "gelu",
      "hidden_dropout_prob": 0.1,
      "hidden_size": 384,
      "intermediate_size": 1536,
      "initializer_range": 0.02,
      "max_position_embeddings": 2048,
      "num_attention_heads": 12,
      "num_hidden_layers": 6,
      "task_type_vocab_size": 16,
      "type_vocab_size": 4,
      "use_task_id": true,
      "vocab_size": 40000,
      "init_class": "ErnieModel"
    }
  ],
  "init_class": "UIE"
}