bearking58 committed on
Commit
33eefe1
·
1 Parent(s): eeb9bac

Revert "fix: typo on hf_token env name"

Browse files

This reverts commit 469758138e209ba717d938e6fb3f36ef156a93fa.

cloudbuild.yaml CHANGED
@@ -10,7 +10,7 @@ steps:
10
  "us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest",
11
  ".",
12
  ]
13
- secretEnv: ["_HF_TOKEN"]
14
 
15
  - name: "gcr.io/cloud-builders/docker"
16
  args:
@@ -25,4 +25,4 @@ images:
25
  availableSecrets:
26
  secretManager:
27
  - versionName: "projects/${PROJECT_ID}/secrets/HF_TOKEN/versions/1"
28
- env: "_HF_TOKEN"
 
10
  "us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest",
11
  ".",
12
  ]
13
+ secretEnv: ["HF_TOKEN"]
14
 
15
  - name: "gcr.io/cloud-builders/docker"
16
  args:
 
25
  availableSecrets:
26
  secretManager:
27
  - versionName: "projects/${PROJECT_ID}/secrets/HF_TOKEN/versions/1"
28
+ env: "HF_TOKEN"
core-model-prediction/Dockerfile CHANGED
@@ -20,6 +20,9 @@ RUN python -m nltk.downloader punkt wordnet averaged_perceptron_tagger
20
  # Unzip wordnet
21
  RUN unzip /root/nltk_data/corpora/wordnet.zip -d /root/nltk_data/corpora/
22
 
 
 
 
23
  # Download HuggingFace model
24
  RUN python -c "from transformers import AutoTokenizer, AutoModelForCausalLM; \
25
  tokenizer = AutoTokenizer.from_pretrained('google/gemma-2b', token='$HF_TOKEN'); \
 
20
  # Unzip wordnet
21
  RUN unzip /root/nltk_data/corpora/wordnet.zip -d /root/nltk_data/corpora/
22
 
23
+ # Print masked HF Token for debugging
24
+ RUN echo ${HF_TOKEN:0:4}********${HF_TOKEN: -4}
25
+
26
  # Download HuggingFace model
27
  RUN python -c "from transformers import AutoTokenizer, AutoModelForCausalLM; \
28
  tokenizer = AutoTokenizer.from_pretrained('google/gemma-2b', token='$HF_TOKEN'); \