Remove unused imports now that the distributed sampler is no longer used
Browse files- geneformer/pretrainer.py +1 -5
geneformer/pretrainer.py
CHANGED
|
@@ -8,13 +8,12 @@ import math
|
|
| 8 |
import pickle
|
| 9 |
import warnings
|
| 10 |
from enum import Enum
|
| 11 |
-
from typing import Dict, List, Optional, Union
|
| 12 |
|
| 13 |
import numpy as np
|
| 14 |
import torch
|
| 15 |
from datasets import Dataset
|
| 16 |
from packaging import version
|
| 17 |
-
from torch.utils.data.distributed import DistributedSampler
|
| 18 |
from torch.utils.data.sampler import RandomSampler
|
| 19 |
from transformers import (
|
| 20 |
BatchEncoding,
|
|
@@ -24,11 +23,8 @@ from transformers import (
|
|
| 24 |
)
|
| 25 |
from transformers.file_utils import is_datasets_available, is_sagemaker_dp_enabled
|
| 26 |
from transformers.trainer_pt_utils import (
|
| 27 |
-
DistributedLengthGroupedSampler,
|
| 28 |
-
DistributedSamplerWithLoop,
|
| 29 |
LengthGroupedSampler,
|
| 30 |
)
|
| 31 |
-
from transformers.training_args import ParallelMode
|
| 32 |
from transformers.utils import is_tf_available, is_torch_available, logging, to_py_obj
|
| 33 |
from transformers.utils.generic import _is_tensorflow, _is_torch
|
| 34 |
|
|
|
|
| 8 |
import pickle
|
| 9 |
import warnings
|
| 10 |
from enum import Enum
|
| 11 |
+
from typing import Dict, List, Optional, Union
|
| 12 |
|
| 13 |
import numpy as np
|
| 14 |
import torch
|
| 15 |
from datasets import Dataset
|
| 16 |
from packaging import version
|
|
|
|
| 17 |
from torch.utils.data.sampler import RandomSampler
|
| 18 |
from transformers import (
|
| 19 |
BatchEncoding,
|
|
|
|
| 23 |
)
|
| 24 |
from transformers.file_utils import is_datasets_available, is_sagemaker_dp_enabled
|
| 25 |
from transformers.trainer_pt_utils import (
|
|
|
|
|
|
|
| 26 |
LengthGroupedSampler,
|
| 27 |
)
|
|
|
|
| 28 |
from transformers.utils import is_tf_available, is_torch_available, logging, to_py_obj
|
| 29 |
from transformers.utils.generic import _is_tensorflow, _is_torch
|
| 30 |
|