CodCodingCode committed on
Commit
6555fdc
·
1 Parent(s): 2ebe745

added debugging print statements

Browse files
Files changed (1) hide show
  1. app.py +9 -0
app.py CHANGED
@@ -20,9 +20,14 @@ local_cache = snapshot_download(
20
  f"{SUBFOLDER}/*.safetensors",
21
  ],
22
  )
 
 
 
23
 
24
  # ——— 2) Point MODEL_DIR at that subfolder ———
25
  MODEL_DIR = os.path.join(local_cache, SUBFOLDER)
 
 
26
 
27
  # ——— 3) Load tokenizer & model from disk ———
28
  tokenizer = AutoTokenizer.from_pretrained(
@@ -30,6 +35,7 @@ tokenizer = AutoTokenizer.from_pretrained(
30
  use_fast=False,
31
  trust_remote_code=True,
32
  )
 
33
 
34
  model = AutoModelForCausalLM.from_pretrained(
35
  MODEL_DIR,
@@ -38,6 +44,7 @@ model = AutoModelForCausalLM.from_pretrained(
38
  trust_remote_code=True,
39
  )
40
  model.eval()
 
41
 
42
 
43
  # === Role Agent with instruction/input/output format ===
@@ -54,7 +61,9 @@ class RoleAgent:
54
  f"Output:"
55
  )
56
  print("__file__ is:", __file__)
 
57
  print("tokenizer is:", self.tokenizer, "— type:", type(self.tokenizer))
 
58
  encoding = self.tokenizer(prompt, return_tensors="pt")
59
  inputs = {k: v.to(self.model.device) for k, v in encoding.items()}
60
 
 
20
  f"{SUBFOLDER}/*.safetensors",
21
  ],
22
  )
23
+ print("[DEBUG] snapshot_download → local_cache:", local_cache)
24
+ import pathlib
25
+ print("[DEBUG] MODEL root contents:", list(pathlib.Path(local_cache).glob(f"{SUBFOLDER}/*")))
26
 
27
  # ——— 2) Point MODEL_DIR at that subfolder ———
28
  MODEL_DIR = os.path.join(local_cache, SUBFOLDER)
29
+ print("[DEBUG] MODEL_DIR:", MODEL_DIR)
30
+ print("[DEBUG] MODEL_DIR files:", os.listdir(MODEL_DIR))
31
 
32
  # ——— 3) Load tokenizer & model from disk ———
33
  tokenizer = AutoTokenizer.from_pretrained(
 
35
  use_fast=False,
36
  trust_remote_code=True,
37
  )
38
+ print("[DEBUG] Loaded tokenizer object:", tokenizer, "type:", type(tokenizer))
39
 
40
  model = AutoModelForCausalLM.from_pretrained(
41
  MODEL_DIR,
 
44
  trust_remote_code=True,
45
  )
46
  model.eval()
47
+ print("[DEBUG] Loaded model object:", model.__class__.__name__, "device:", next(model.parameters()).device)
48
 
49
 
50
  # === Role Agent with instruction/input/output format ===
 
61
  f"Output:"
62
  )
63
  print("__file__ is:", __file__)
64
+ print("At import, tokenizer is:", tokenizer, type(tokenizer))
65
  print("tokenizer is:", self.tokenizer, "— type:", type(self.tokenizer))
66
+ print("[DEBUG] prompt:", prompt)
67
  encoding = self.tokenizer(prompt, return_tensors="pt")
68
  inputs = {k: v.to(self.model.device) for k, v in encoding.items()}
69