# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "transformers",
#     "torch",
# ]
# ///
"""Demo: run GSAI-ML/LLaDA-8B-Instruct via the transformers pipeline helper,
then show the lower-level direct-load alternative.

NOTE(review): trust_remote_code=True executes model-repo Python on your
machine — only keep it for repositories you trust (LLaDA ships custom code,
so it is required here).
"""

# Group the imports once at the top instead of repeating them mid-script.
from transformers import AutoModelForCausalLM, pipeline

# --- High-level helper -----------------------------------------------------
# pipeline() downloads the checkpoint and builds tokenizer + model in one call.
pipe = pipeline("text-generation", model="GSAI-ML/LLaDA-8B-Instruct", trust_remote_code=True)

messages = [
    {"role": "user", "content": "Who are you?"},
]

# Capture and print the generation instead of discarding it — otherwise the
# demo runs the model and shows nothing.
result = pipe(messages)
print(result)

# --- Direct load (alternative API) -----------------------------------------
# WARNING: this loads the ~8B-parameter weights a SECOND time, independently
# of the copy already held by `pipe`. In a real program pick ONE of the two
# approaches (or reuse `pipe.model`); both are kept here only because this is
# a usage demo of each API.
model = AutoModelForCausalLM.from_pretrained("GSAI-ML/LLaDA-8B-Instruct", trust_remote_code=True)