Skip to content

Commit 57e5b63

Browse files
committed
Remove unused imports
1 parent fb9f933 commit 57e5b63

File tree

3 files changed

+4
-16
lines changed

3 files changed

+4
-16
lines changed

flair/trainers/plugins/functional/onecycle.py

-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,5 @@
1-
import copy
21
import logging
32

4-
from torch.optim.lr_scheduler import OneCycleLR # type: ignore
5-
63
from flair.optim import LinearSchedulerWithWarmup
74
from flair.trainers.plugins.base import TrainerPlugin
85

flair/trainers/trainer.py

+3-12
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import datetime
21
import inspect
32
import logging
43
import os
@@ -7,7 +6,7 @@
76
import warnings
87
from inspect import signature
98
from pathlib import Path
10-
from typing import Any, Dict, List, Optional, Tuple, Type, Union
9+
from typing import List, Optional, Tuple, Type, Union
1110

1211
import torch
1312
from torch.optim.sgd import SGD
@@ -17,8 +16,6 @@
1716
import flair.nn
1817
from flair.data import Corpus, Dictionary, _len_dataset
1918
from flair.datasets import DataLoader
20-
from flair.nn import Model
21-
from flair.optim import ExpAnnealLR, LinearSchedulerWithWarmup
2219
from flair.trainers.plugins import (
2320
CheckpointPlugin,
2421
LogFilePlugin,
@@ -32,13 +29,7 @@
3229
)
3330
from flair.trainers.plugins.functional.anneal_on_plateau import AnnealingPlugin
3431
from flair.trainers.plugins.functional.onecycle import OneCyclePlugin
35-
from flair.training_utils import (
36-
AnnealOnPlateau,
37-
identify_dynamic_embeddings,
38-
init_output_file,
39-
log_line,
40-
store_embeddings,
41-
)
32+
from flair.training_utils import identify_dynamic_embeddings, log_line, store_embeddings
4233

4334
log = logging.getLogger("flair")
4435

@@ -394,7 +385,7 @@ def train_custom(
394385
# - SchedulerPlugin -> load state for anneal_with_restarts, batch_growth_annealing, logic for early stopping
395386
# - LossFilePlugin -> get the current epoch for loss file logging
396387
self.dispatch("before_training_epoch", epoch=epoch)
397-
self.model.model_card["training_parameters"]["epoch"] = epoch # type: ignore
388+
self.model.model_card["training_parameters"]["epoch"] = epoch # type: ignore
398389

399390
current_learning_rate = [group["lr"] for group in self.optimizer.param_groups]
400391
momentum = [group["momentum"] if "momentum" in group else 0 for group in self.optimizer.param_groups]

tests/test_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def test_text_classifier_multi(results_base_path, tasks_base_path):
4242
"Parameters:",
4343
"- learning_rate: ",
4444
"patience",
45-
"embedding storage",
45+
"embedding storage:",
4646
"epoch 1 - iter",
4747
"EPOCH 1 done: loss",
4848
"Results:",

0 commit comments

Comments
 (0)