Commit 6156999

Remove if __main__ from main.py
1 parent 1c2a534 commit 6156999

File tree

1 file changed (+0, -44 lines)

validator/main.py

Lines changed: 0 additions & 44 deletions
@@ -134,47 +134,3 @@ def validate(self, value: Any, metadata: Dict = {}) -> ValidationResult:
             )
 
         return PassResult()
-
-
-if __name__ == "__main__":
-    from validator.prompts.prompts import RagContextRelevancePrompt
-
-    guard = Guard().use(
-        MLcubeRagContextValidator(
-            rag_context_eval_prompt=RagContextRelevancePrompt(),
-            pass_threshold=1,
-            model_name="gpt-4o-mini",
-            on_fail="noop",  # type: ignore
-            on="prompt",
-        )
-    )
-
-    metadata = {
-        "user_input": "What's the weather in Milan, today?",
-        "retrieved_context": "Milan, what a beautiful day. Sunny and warm.",
-    }
-
-    response = guard(
-        llm_api=openai.chat.completions.create,
-        prompt=metadata["user_input"],
-        model="gpt-4o-mini",
-        max_tokens=1024,
-        temperature=0,
-        metadata=metadata,
-    )
-
-    print(response)
-    assert response.validation_passed, "The validation should pass."  # type: ignore
-
-    metadata["retrieved_context"] = "The capital of Italy is Rome."
-    response = guard(
-        llm_api=openai.chat.completions.create,
-        prompt=metadata["user_input"],
-        model="gpt-4o-mini",
-        max_tokens=1024,
-        temperature=0,
-        metadata=metadata,
-    )
-
-    print(response)
-    assert not response.validation_passed, "The validation shouldn't pass."  # type: ignore
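The deleted block was an inline smoke test for MLcubeRagContextValidator wired into a Guard. If that check is worth keeping outside main.py, below is a minimal sketch of the same two assertions as a pytest module. The file name test_main.py, the validator.main import path, the fixture/helper layout, and the pytest dependency are assumptions, not part of this commit; running it requires the guardrails package and a configured OpenAI API key.

# test_main.py -- hedged sketch; import paths and layout are assumed, not taken from this commit.
import openai
import pytest
from guardrails import Guard

from validator.main import MLcubeRagContextValidator  # assumed module path
from validator.prompts.prompts import RagContextRelevancePrompt


@pytest.fixture
def guard() -> Guard:
    # Same validator configuration the deleted __main__ block used.
    return Guard().use(
        MLcubeRagContextValidator(
            rag_context_eval_prompt=RagContextRelevancePrompt(),
            pass_threshold=1,
            model_name="gpt-4o-mini",
            on_fail="noop",  # type: ignore
            on="prompt",
        )
    )


def _run(guard: Guard, retrieved_context: str):
    # Helper (hypothetical) that mirrors the deleted guard(...) call.
    metadata = {
        "user_input": "What's the weather in Milan, today?",
        "retrieved_context": retrieved_context,
    }
    return guard(
        llm_api=openai.chat.completions.create,
        prompt=metadata["user_input"],
        model="gpt-4o-mini",
        max_tokens=1024,
        temperature=0,
        metadata=metadata,
    )


def test_relevant_context_passes(guard):
    response = _run(guard, "Milan, what a beautiful day. Sunny and warm.")
    assert response.validation_passed, "The validation should pass."  # type: ignore


def test_irrelevant_context_fails(guard):
    response = _run(guard, "The capital of Italy is Rome.")
    assert not response.validation_passed, "The validation shouldn't pass."  # type: ignore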

0 commit comments