From e6961b397050de024d75ddeea7fe0fcb7013610f Mon Sep 17 00:00:00 2001
From: chenxujun
Date: Thu, 15 Dec 2022 10:27:44 +0800
Subject: [PATCH] Update doc for fp16util.py (#50)

Update the URL; the original URL is not found.
---
 modelscope/utils/multi_modal/fp16/fp16util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modelscope/utils/multi_modal/fp16/fp16util.py b/modelscope/utils/multi_modal/fp16/fp16util.py
index 29595a6c..f7ccd167 100644
--- a/modelscope/utils/multi_modal/fp16/fp16util.py
+++ b/modelscope/utils/multi_modal/fp16/fp16util.py
@@ -123,7 +123,7 @@ def prep_param_lists(model, flat_master=False):
     Currently, if ``flat_master=True``, all the model's parameters must be the same type. If the model has parameters of different types, use ``flat_master=False``, or use :class:`FP16_Optimizer`. # noqa
 
     .. _`Training Neural Networks with Mixed Precision: Real Examples`:
-        http://on-demand.gputechconf.com/gtc/2018/video/S81012/
+        https://www.nvidia.com/en-us/on-demand/session/gtcsiliconvalley2018-s81012/
     """
     model_params = [
         param for param in model.parameters() if param.requires_grad
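
For context, a minimal usage sketch of the function whose docstring this patch touches. It assumes the import path shown in the diff above; the toy Linear model and SGD optimizer are illustrative, not from the repository. Per the docstring, prep_param_lists returns (model_params, master_params), where the FP32 master copies are what the optimizer should step on.

# Usage sketch (assumptions: import path matches the patched file;
# the toy model and optimizer choice are hypothetical).
import torch
from modelscope.utils.multi_modal.fp16.fp16util import prep_param_lists

model = torch.nn.Linear(16, 4).half()  # model holds FP16 parameters

# flat_master=False keeps one FP32 master copy per model parameter.
# flat_master=True would flatten the masters into a single tensor, which,
# per the docstring above, requires all model parameters to share one dtype.
model_params, master_params = prep_param_lists(model, flat_master=False)

# Step the optimizer on the FP32 masters, not the FP16 model parameters.
optimizer = torch.optim.SGD(master_params, lr=1e-3)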