Saadi07 committed on
Commit bdca5e1 · 1 Parent(s): 1f19f01
Files changed (1): app.py (+5 −7)
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from PIL import Image
 import torch
-from transformers import AutoProcessor, AutoModelForCausalLM, BlipProcessor
+from transformers import AutoProcessor, BlipForConditionalGeneration, BlipProcessor
 import os
 
 # Check if we're running on CPU or GPU
@@ -22,21 +22,19 @@ except Exception as e:
 # Load base model - use the smallest possible model for CPU
 try:
     # Try loading the smallest BLIP model
-    model = AutoModelForCausalLM.from_pretrained(
+    model = BlipForConditionalGeneration.from_pretrained(
         "Salesforce/blip-image-captioning-base",
-        device_map=None,
         torch_dtype=torch.float32  # Use float32 for CPU compatibility
     )
     print("Loaded base BLIP model")
 except Exception as e:
     print(f"Error loading model: {e}")
-    # If that fails, load an even smaller model
-    model = AutoModelForCausalLM.from_pretrained(
+    # If that fails, load with low memory usage
+    model = BlipForConditionalGeneration.from_pretrained(
         "Salesforce/blip-image-captioning-base",
-        device_map=None,
         low_cpu_mem_usage=True
     )
-    print("Loaded fallback model")
+    print("Loaded fallback model with low memory settings")
 
 # Move model to device if needed
 model = model.to(device)
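
For reference, a minimal sketch of how the classes used after this change are typically wired together for captioning. The image path, unprompted call, and generation settings below are illustrative assumptions, not code taken from app.py:

# Minimal captioning sketch using the same checkpoint (assumed usage, not from app.py)
import torch
from PIL import Image
from transformers import BlipProcessor, BlipForConditionalGeneration

device = "cuda" if torch.cuda.is_available() else "cpu"

processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-base")
model = BlipForConditionalGeneration.from_pretrained(
    "Salesforce/blip-image-captioning-base",
    torch_dtype=torch.float32,
).to(device)

image = Image.open("example.jpg").convert("RGB")  # "example.jpg" is a placeholder path
inputs = processor(images=image, return_tensors="pt").to(device)
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=30)
print(processor.decode(output_ids[0], skip_special_tokens=True))

The Salesforce/blip-image-captioning-base checkpoint declares BlipForConditionalGeneration as its architecture, which is presumably why the AutoModelForCausalLM calls were swapped out in this commit.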