diff --git a/checkLora.py b/checkLora.py
index 95c57d771d4168e6bbf7b3d86368ef42faade469..9f4d7936cccac9bed0f24e0ef2a03ba2f8b0fa03 100644
--- a/checkLora.py
+++ b/checkLora.py
@@ -314,9 +314,12 @@ tokenizer = BertTokenizer.from_pretrained("./micro_no_cross_fine_tuned2")
 
 
 base_model = MultiTaskBert.from_pretrained('bert-base-uncased').to(device)
+print("Загружена базовая")
 
 model = PeftModel.from_pretrained(base_model, "./micro_no_cross_fine_tuned/lora2", strict=False)
 
+print("Загружена с лора")
+
 # Switch the model to evaluation mode
 model.eval()
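
The two added print statements only confirm that each call returned; they do not show whether the LoRA weights actually attached to the base model. Below is a minimal sketch of a stricter check, not part of the patch: report_lora_load is a hypothetical helper that inspects the PeftModel returned by PeftModel.from_pretrained above, assuming the standard peft naming convention where LoRA tensors carry "lora_" in their parameter names.

from peft import PeftModel

def report_lora_load(model: PeftModel) -> None:
    """Print which adapters are registered and how many LoRA tensors were injected."""
    # PeftModel.peft_config maps adapter names to their configs.
    print("Registered adapters:", list(model.peft_config.keys()))
    # LoRA parameter names contain "lora_" (e.g. ...query.lora_A.default.weight).
    lora_tensors = [name for name, _ in model.named_parameters() if "lora_" in name]
    print(f"LoRA parameter tensors found: {len(lora_tensors)}")
    if not lora_tensors:
        print("Warning: no LoRA parameters found -- the adapter may not have loaded")

# Usage, right after the load shown in the diff:
# report_lora_load(model)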