From b9cfeac9640b77b221037214c3419ed5a08bc82f Mon Sep 17 00:00:00 2001
From: Ming-Hsuan-Tu
Date: Tue, 7 Feb 2023 09:31:32 +0800
Subject: [PATCH] [Improvement] Init teacher weight if without teacher_ckpt

---
 .../algorithms/distill/configurable/single_teacher_distill.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mmrazor/models/algorithms/distill/configurable/single_teacher_distill.py b/mmrazor/models/algorithms/distill/configurable/single_teacher_distill.py
index 44a8a3438..d921aa628 100644
--- a/mmrazor/models/algorithms/distill/configurable/single_teacher_distill.py
+++ b/mmrazor/models/algorithms/distill/configurable/single_teacher_distill.py
@@ -56,9 +56,9 @@ def __init__(self,
                             f'{type(teacher)}')
 
         self.teacher = teacher
+        self.teacher.init_weights()
         if teacher_ckpt:
             # avoid loaded parameters be overwritten
-            self.teacher.init_weights()
             _ = load_checkpoint(self.teacher, teacher_ckpt)
         self.teacher_trainable = teacher_trainable
         if not self.teacher_trainable:
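
Note (illustration only, not part of the patch): before this change, `self.teacher.init_weights()` was only called inside the `if teacher_ckpt:` branch, so a teacher constructed without a checkpoint relied on whatever initialization happened elsewhere. The sketch below mimics the new ordering with stand-ins; `FakeTeacher`, `fake_load_checkpoint` and `build_teacher` are hypothetical names used for illustration, not mmrazor or mmengine APIs.

# init_order_sketch.py -- standalone illustration of the post-patch ordering.
class FakeTeacher:
    """Stand-in for an mmengine BaseModel (hypothetical, illustration only)."""

    def __init__(self):
        self.weight = None

    def init_weights(self):
        # Default/random initialization, stubbed out.
        self.weight = 'randomly-initialized'


def fake_load_checkpoint(model, ckpt_path):
    # Stand-in for a checkpoint loader, stubbed out.
    model.weight = f'loaded-from:{ckpt_path}'
    return {}


def build_teacher(teacher, teacher_ckpt=None):
    # Post-patch ordering: always initialize the teacher first ...
    teacher.init_weights()
    # ... then let an explicit checkpoint, if given, override the init,
    # so loaded parameters are not overwritten by the random init.
    if teacher_ckpt:
        _ = fake_load_checkpoint(teacher, teacher_ckpt)
    return teacher


if __name__ == '__main__':
    print(build_teacher(FakeTeacher()).weight)
    # -> randomly-initialized (pre-patch, no init happened in this branch)
    print(build_teacher(FakeTeacher(), 'teacher.pth').weight)
    # -> loaded-from:teacher.pth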