Evaluates the reciprocal rank of the top retrieved document.
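For reference, the reciprocal rank of a ranked result list is 1 / rank of the first relevant document found in it. The sketch below is purely illustrative and independent of the project code listed afterwards; the function name and document IDs are hypothetical:

import math

def reciprocal_rank(retrieved: list, relevant: set) -> float:
    """Return 1 / rank of the first relevant document, or 0.0 if none is retrieved."""
    for rank, doc in enumerate(retrieved, start=1):
        if doc in relevant:
            return 1 / rank
    return 0.0

# The first relevant document ("d1") appears at rank 2, so the score is 0.5.
assert math.isclose(reciprocal_rank(["d3", "d1", "d7"], {"d1", "d2"}), 0.5)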
Source code in src/evaluation/metrics/order_aware/reciprocal_rank.py
def get_assert(output: str, context) -> GradingResult:
    """Evaluates the reciprocal rank of the top retrieved document."""
    retrieved_docs = safe_eval(context["vars"]["context"])
    relevant_docs = safe_eval(context["vars"]["relevant_context"])
    score = 0
    # compute Reciprocal Rank
    try:
        score = round(1 / (relevant_docs.index(retrieved_docs[0]) + 1), 2)
    except ValueError:
        # the top retrieved document is not in the relevant context
        score = -1
    # threshold = context["test"]["metadata"]["threshold_ragas_as"]
    threshold = 0
    if math.isnan(score):
        score = 0.0
    return {
        "pass": score > threshold,
        "score": score,
        "reason": f"{score} > {threshold} = {score > threshold}",
    }
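A minimal sketch of how this assert could be exercised locally. The document IDs are hypothetical, and it is assumed that safe_eval parses the string-encoded lists stored in the test vars:

# Hypothetical test vars: "context" holds the retrieved documents (ranked),
# "relevant_context" holds the ground-truth relevant documents.
context = {
    "vars": {
        "context": "['doc_b', 'doc_c']",
        "relevant_context": "['doc_a', 'doc_b']",
    }
}

result = get_assert(output="", context=context)
# 'doc_b' (the top retrieved document) sits at index 1 of relevant_context,
# so the score is round(1 / 2, 2) = 0.5 and the assert passes (0.5 > 0).
print(result)  # {'pass': True, 'score': 0.5, 'reason': '0.5 > 0 = True'}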