Spaces:
Sleeping
Sleeping
File size: 1,857 Bytes
45c55c1 b0dd80d 45c55c1 b0dd80d b4afb28 66c7336 b4afb28 66c7336 b4afb28 66c7336 b4afb28 b0dd80d |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 |
from witness.witness_protocol import ABRAHAMIC_SYSTEM_PROMPT, witness_review
# Replace this import with your actual R-Zero interface
# e.g., from rzero_client import generate_response
def query_rzero_with_witness(user_input: str) -> str:
    """
    Wrap a user request in the covenant system prompt, query R‑Zero,
    and filter the model's answer through Witness review before
    handing it back to the caller/UI.
    """
    # Frame the request with the covenant system prompt.
    combined = f"{ABRAHAMIC_SYSTEM_PROMPT}\n\nUser: {user_input}"
    # Wire the real R‑Zero client in here once available, e.g.:
    # raw_answer = generate_response(combined)
    raw_answer = "[R‑Zero output placeholder]"
    # Nothing leaves this function without passing Witness review.
    return witness_review(raw_answer)
# -------------------------
# New class wrapper for app.py usage
# -------------------------
import os
from huggingface_hub import InferenceClient
from witness.witness_protocol import ABRAHAMIC_SYSTEM_PROMPT, witness_review
class WitnessRZero:
    """Covenant-framed wrapper around a Hugging Face inference endpoint.

    Every generated answer is passed through Witness review before
    being returned to the caller.
    """

    def __init__(self, device="cpu", model_id="your-featherless-ai-model-id"):
        # Stored for callers to inspect; inference itself runs remotely.
        self.device = device
        # Token env-var name must match the repo secret configured for the Space.
        api_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
        self.client = InferenceClient(model_id, token=api_token)

    def generate(self, user_input: str, **kwargs) -> str:
        """Generate a covenant-framed answer and apply Witness review to it."""
        framed = f"{ABRAHAMIC_SYSTEM_PROMPT}\n\nUser: {user_input}"
        raw = self.client.text_generation(framed, **kwargs)
        return witness_review(raw)
if __name__ == "__main__":
    # Smoke-test the function-style entry point.
    sample_query = "How should we handle a sensitive diplomatic dispute?"
    print(query_rzero_with_witness(sample_query))

    # Smoke-test the class-based wrapper.
    engine = WitnessRZero()
    print(engine.generate("Test covenant‑aligned reasoning"))
|