# (extraneous scraper/advertisement text, commented out so the file stays parseable)
# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — the enclosing function's signature and the `if`
# branch that pairs with the `else:` below are outside this view; presumably
# the missing branch selects smoothing mode (confirm against the full file).
# Smoothed branch: when the raw n-gram match count is unusable, credit
# `smooth_value` instead, expressed as a percentage of total n-grams.
precisions[n] = 100. * smooth_value / total[n]
else:
# Unsmoothed branch: percentage of candidate n-grams that matched a reference.
precisions[n] = 100. * correct[n] / total[n]
# If the system guesses no i-grams, 1 <= i <= NGRAM_ORDER, the BLEU score is 0 (technically undefined).
# This is a problem for sentence-level BLEU or a corpus of short sentences, where systems will get no credit
# if sentence lengths fall under the NGRAM_ORDER threshold. This fix scales NGRAM_ORDER to the observed
# maximum order. It is only available through the API and off by default
# Brevity penalty: 1.0 when the hypothesis is at least as long as the
# reference; otherwise exp(1 - ref_len/sys_len), and 0.0 for an empty
# hypothesis (sys_len == 0) to avoid division by zero.
brevity_penalty = 1.0
if sys_len < ref_len:
brevity_penalty = math.exp(1 - ref_len / sys_len) if sys_len > 0 else 0.0
# Final score: brevity penalty times the geometric mean (computed in log
# space) of the first `effective_order` n-gram precisions.
# NOTE(review): `my_log` is defined outside this view — presumably a log that
# guards against log(0); confirm in the full file.
score = brevity_penalty * math.exp(sum(map(my_log, precisions[:effective_order])) / effective_order)
# Bundle the score with its components; `BLEU` is a type defined elsewhere
# (presumably a namedtuple of these seven fields — confirm upstream).
return BLEU(score, correct, total, precisions, brevity_penalty, sys_len, ref_len)