MaxGab committed on
Commit
13182c6
·
verified ·
1 Parent(s): f99d148

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -13,9 +13,9 @@ if not hasattr(config, 'layerdrop'):
13
  if not hasattr(config, 'max_position_embeddings'):
14
  config.max_position_embeddings = 2048
15
  if not hasattr(config, 'num_attention_heads'):
16
- config.num_attention_heads = 1024
17
  if not hasattr(config, 'num_hidden_layers'):
18
- config.num_hidden_layers = 1024
19
 
20
  model = AutoModelForCausalLM.from_pretrained(model_name, config=config)
21
  tokenizer = AutoTokenizer.from_pretrained(model_name)
 
13
  if not hasattr(config, 'max_position_embeddings'):
14
  config.max_position_embeddings = 2048
15
  if not hasattr(config, 'num_attention_heads'):
16
+ config.num_attention_heads = 12
17
  if not hasattr(config, 'num_hidden_layers'):
18
+ config.num_hidden_layers = 12
19
 
20
  model = AutoModelForCausalLM.from_pretrained(model_name, config=config)
21
  tokenizer = AutoTokenizer.from_pretrained(model_name)