Update tokenization_minicpm.py
tokenization_minicpm.py  CHANGED  (+2 -2)

@@ -14,7 +14,7 @@ from datamodel_code_generator.model import get_data_model_types
 from datamodel_code_generator.parser.jsonschema import JsonSchemaParser
 from jsonschema import Draft202012Validator, exceptions, validate
 
-from transformers import
+from transformers import LlamaTokenizerFast
 from transformers.tokenization_utils_base import BatchEncoding
 from transformers.utils import TensorType
 
@@ -22,7 +22,7 @@ from transformers.utils import TensorType
 logger = getLogger(__name__)
 
 
-class MiniCPMTokenizer(
+class MiniCPMTokenizer(LlamaTokenizerFast):
     def apply_chat_template(
         self,
         conversation: Union[List[Dict[str, str]], List[List[Dict[str, str]]]],