vaibhaviiii28 committed on
Commit
77e84bd
·
verified ·
1 Parent(s): 90ce2fd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -16
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from fastapi import FastAPI, File, UploadFile
2
  from pydantic import BaseModel
3
  from transformers import pipeline
@@ -6,36 +7,33 @@ import joblib
6
  import re
7
  import string
8
  import io
9
- import os
10
  import uvicorn
11
- import os
12
- # ✅ Set Hugging Face Cache Directory (Fixes Permission Error)
13
- import os
14
-
15
- # ✅ Set Hugging Face Cache to a writable directory
16
- os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
17
- os.environ["HF_HOME"] = "/tmp/hf_cache"
18
-
19
-
20
-
21
 
 
 
 
22
 
23
 # ✅ Initialize FastAPI
24
  app = FastAPI()
25
- pipe = pipeline("image-classification", model="LukeJacob2023/nsfw-image-detector",cache_dir="/tmp")
26
- # ✅ Load NSFW Image Classification Model (with custom cache directory)
27
 
 
 
 
 
 
 
 
28
 
29
 # ✅ Load Toxic Text Classification Model
30
  try:
31
  model = joblib.load("toxic_classifier.pkl")
32
  vectorizer = joblib.load("vectorizer.pkl")
33
- print("✅ Model & Vectorizer Loaded Successfully!")
34
  except Exception as e:
35
- print(f"❌ Error: {e}")
36
  exit(1)
37
 
38
- # 📌 Text Input Data Model
39
  class TextInput(BaseModel):
40
  text: str
41
 
 
1
+ import os
2
  from fastapi import FastAPI, File, UploadFile
3
  from pydantic import BaseModel
4
  from transformers import pipeline
 
7
  import re
8
  import string
9
  import io
 
10
  import uvicorn
 
 
 
 
 
 
 
 
 
 
11
 
12
+ # ✅ Set Hugging Face Cache to a writable directory (Fixes Permission Error)
13
+ os.environ["TRANSFORMERS_CACHE"] = "/tmp"
14
+ os.environ["HF_HOME"] = "/tmp"
15
 
16
 # ✅ Initialize FastAPI
17
  app = FastAPI()
 
 
18
 
19
+ # ✅ Load NSFW Image Classification Model
20
+ try:
21
+ pipe = pipeline("image-classification", model="LukeJacob2023/nsfw-image-detector", cache_dir="/tmp")
22
+ print("✅ NSFW Model Loaded Successfully!")
23
+ except Exception as e:
24
+ print(f"❌ Error Loading NSFW Model: {e}")
25
+ exit(1)
26
 
27
 # ✅ Load Toxic Text Classification Model
28
  try:
29
  model = joblib.load("toxic_classifier.pkl")
30
  vectorizer = joblib.load("vectorizer.pkl")
31
+ print("✅ Toxic Text Model & Vectorizer Loaded Successfully!")
32
  except Exception as e:
33
+ print(f"❌ Error Loading Toxic Text Model: {e}")
34
  exit(1)
35
 
36
+ # 📌 Text Input Model
37
  class TextInput(BaseModel):
38
  text: str
39