peder committed on
Commit
ba2588f
·
1 Parent(s): 316436b

print nono

Browse files
Files changed (1) hide show
  1. app.py +7 -7
app.py CHANGED
@@ -9,16 +9,16 @@ import torch
9
  from transformers import pipeline, set_seed
10
  from transformers import AutoTokenizer, AutoModelForCausalLM
11
 
12
- #import torch
13
- print(f"Is CUDA available: {torch.cuda.is_available()}")
14
- # True
15
- print(
16
- f"CUDA device for you Perrito: {torch.cuda.get_device_name(torch.cuda.current_device())}")
17
- # Tesla T4
18
 
19
  HF_AUTH_TOKEN = "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"
20
  DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
21
- print("DEVICE SENOOOOOR", DEVICE)
22
  DTYPE = torch.float32 if DEVICE == "cpu" else torch.float16
23
  MODEL_NAME = os.environ.get("MODEL_NAME", "NbAiLab/nb-gpt-j-6B-alpaca")
24
  MAX_LENGTH = int(os.environ.get("MAX_LENGTH", 256))
 
9
  from transformers import pipeline, set_seed
10
  from transformers import AutoTokenizer, AutoModelForCausalLM
11
 
12
+ # #import torch
13
+ # print(f"Is CUDA available: {torch.cuda.is_available()}")
14
+ # # True
15
+ # print(
16
+ # f"CUDA device for you Perrito: {torch.cuda.get_device_name(torch.cuda.current_device())}")
17
+ # # Tesla T4
18
 
19
  HF_AUTH_TOKEN = "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"
20
  DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
21
+ #print("DEVICE SENOOOOOR", DEVICE)
22
  DTYPE = torch.float32 if DEVICE == "cpu" else torch.float16
23
  MODEL_NAME = os.environ.get("MODEL_NAME", "NbAiLab/nb-gpt-j-6B-alpaca")
24
  MAX_LENGTH = int(os.environ.get("MAX_LENGTH", 256))