"""Upload a merged fine-tuned causal-LM and its tokenizer to the Hugging Face Hub."""
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer  # (was imported twice)

# Place the entire model ("" = root module) on the current GPU when one is
# available; None lets transformers default to CPU placement.
d_map = {"": torch.cuda.current_device()} if torch.cuda.is_available() else None

merged_model_path = "outputs/merged"  # Path to the combined weights
repo_name = "Financial_Analyst"       # HuggingFace repo name

# NOTE(review): removed `from_tf=True` — the merged checkpoint is a PyTorch
# save, and from_tf would make transformers look for a TensorFlow/h5 file.
# Also removed `ignore_mismatched_sizes=True`: silently re-initializing
# mismatched weights would publish a corrupted model; better to fail loudly.
model = AutoModelForCausalLM.from_pretrained(
    merged_model_path,
    trust_remote_code=True,
    device_map=d_map,
    torch_dtype=torch.float16,
).eval()
tokenizer = AutoTokenizer.from_pretrained(merged_model_path)

# The original referenced an undefined name `hf_token` (NameError at runtime);
# read the auth token from the environment instead of hard-coding it.
hf_token = os.environ["HF_TOKEN"]

model.push_to_hub(repo_name, token=hf_token)
tokenizer.push_to_hub(repo_name, token=hf_token)