From d2439fe443fbbac0bec5de7368c3e43998c3a79f Mon Sep 17 00:00:00 2001
From: x54-729 <17307130121@fudan.edu.cn>
Date: Wed, 13 Apr 2022 09:05:21 +0000
Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=5FMetricsWrapper=20update?=
 =?UTF-8?q?=E4=BC=A0=E5=8F=82=E7=9A=84bug?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 fastNLP/core/controllers/evaluator.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/fastNLP/core/controllers/evaluator.py b/fastNLP/core/controllers/evaluator.py
index 479686e1..2e3678d3 100644
--- a/fastNLP/core/controllers/evaluator.py
+++ b/fastNLP/core/controllers/evaluator.py
@@ -364,16 +364,16 @@ class _MetricsWrapper:
         else:
             args.append(batch)
         if not isinstance(outputs, dict):
-            raise RuntimeError(f"The output of your model is of type:`{type(batch)}`, please either directly"
+            raise RuntimeError(f"The output of your model is of type:`{type(outputs)}`, please either directly"
                                f" return a dict from your model or use `output_mapping` to convert it into dict type.")
         if isinstance(metric, Metric):
-            auto_param_call(metric.update, batch, *args)
+            auto_param_call(metric.update, outputs, *args)
         elif _is_torchmetrics_metric(metric):
-            auto_param_call(metric.update, batch, *args)
+            auto_param_call(metric.update, outputs, *args)
         elif _is_allennlp_metric(metric):
-            auto_param_call(metric.__call__, batch, *args)
+            auto_param_call(metric.__call__, outputs, *args)
         elif _is_paddle_metric(metric):
-            res = auto_param_call(metric.compute, batch, *args)
+            res = auto_param_call(metric.compute, outputs, *args)
             metric.update(res)

     def reset(self):