ejschwartz committed on
Commit
347b36e
·
1 Parent(s): 084dc17
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -36,11 +36,12 @@ def predict(type, normalized_asm):
36
 
37
  tokenizer_output = nova_tokenizer.encode(inputs, '', char_types)
38
  input_ids = torch.LongTensor(tokenizer_output['input_ids'].tolist()).unsqueeze(0)
 
39
  nova_attention_mask = torch.LongTensor(tokenizer_output['nova_attention_mask']).unsqueeze(0)
40
 
41
  output = model.generate(
42
  inputs=input_ids.cuda(), max_new_tokens=512, temperature=0.2, top_p=0.95,
43
- num_return_sequences=20, do_sample=True, nova_attention_mask=nova_attention_mask.cuda(),
44
  no_mask_idx=torch.LongTensor([tokenizer_output['no_mask_idx']]).cuda(),
45
  pad_token_id=tokenizer.pad_token_id, eos_token_id=tokenizer.eos_token_id
46
  )
 
36
 
37
  tokenizer_output = nova_tokenizer.encode(inputs, '', char_types)
38
  input_ids = torch.LongTensor(tokenizer_output['input_ids'].tolist()).unsqueeze(0)
39
+ print("Input IDs:", input_ids.shape)
40
  nova_attention_mask = torch.LongTensor(tokenizer_output['nova_attention_mask']).unsqueeze(0)
41
 
42
  output = model.generate(
43
  inputs=input_ids.cuda(), max_new_tokens=512, temperature=0.2, top_p=0.95,
44
+ num_return_sequences=1, do_sample=True, nova_attention_mask=nova_attention_mask.cuda(),
45
  no_mask_idx=torch.LongTensor([tokenizer_output['no_mask_idx']]).cuda(),
46
  pad_token_id=tokenizer.pad_token_id, eos_token_id=tokenizer.eos_token_id
47
  )