Commit ed02bf5 (verified) by MaxGab · Parent(s): 13182c6

Update app.py

Files changed (1): app.py (+7, -2)
app.py CHANGED

@@ -13,9 +13,14 @@ if not hasattr(config, 'layerdrop'):
 if not hasattr(config, 'max_position_embeddings'):
     config.max_position_embeddings = 2048
 if not hasattr(config, 'num_attention_heads'):
-    config.num_attention_heads = 12
+    config.num_attention_heads = 16
 if not hasattr(config, 'num_hidden_layers'):
-    config.num_hidden_layers = 12
+    config.num_hidden_layers = 24
+if not hasattr(config, 'num_hidden_layers'):
+    config.num_hidden_layers = 24
+if not hasattr(config, 'hidden_size'):
+    config.hidden_size = 1024
+
 
 model = AutoModelForCausalLM.from_pretrained(model_name, config=config)
 tokenizer = AutoTokenizer.from_pretrained(model_name)
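For context, a minimal sketch of how the patched block reads after this commit. It assumes `config` is loaded via `AutoConfig.from_pretrained` and that `model_name` is defined earlier in app.py (both are implied but not shown by the diff); the placeholder model name and the `layerdrop` fallback value are illustrative assumptions, and the commit's duplicated `num_hidden_layers` check is collapsed into one.

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

model_name = "example/model"  # placeholder; the real name is set elsewhere in app.py

# Load the config first so missing attributes can be backfilled before the
# model is instantiated.
config = AutoConfig.from_pretrained(model_name)

# Backfill attributes that older or custom configs may lack. Values mirror
# the commit; the layerdrop default is an assumption, as the diff truncates
# that line.
if not hasattr(config, 'layerdrop'):
    config.layerdrop = 0.0
if not hasattr(config, 'max_position_embeddings'):
    config.max_position_embeddings = 2048
if not hasattr(config, 'num_attention_heads'):
    config.num_attention_heads = 16
if not hasattr(config, 'num_hidden_layers'):
    config.num_hidden_layers = 24
if not hasattr(config, 'hidden_size'):
    config.hidden_size = 1024

model = AutoModelForCausalLM.from_pretrained(model_name, config=config)
tokenizer = AutoTokenizer.from_pretrained(model_name)

Note that each guard only fires when the attribute is genuinely absent from the loaded config, so the hard-coded 16/24/1024 values act as fallbacks rather than overrides of whatever the hub config already specifies.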