add empty tokenized_counts to the loom path so the three-value return passes until keep_counts is implemented
geneformer/tokenizer.py  +3 -1

@@ -617,6 +617,8 @@ class TranscriptomeTokenizer:
         return tokenized_cells, file_cell_metadata, tokenized_counts
 
     def tokenize_loom(self, loom_file_path, target_sum=10_000, file_format="loom"):
+        tokenized_counts = []  # keep_counts not implemented for tokenize_loom
+
         if self.custom_attr_name_dict is not None:
             file_cell_metadata = {
                 attr_key: [] for attr_key in self.custom_attr_name_dict.keys()
@@ -704,7 +706,7 @@ class TranscriptomeTokenizer:
             del data.ra["ensembl_id_collapsed"]
 
 
-        return tokenized_cells, file_cell_metadata
+        return tokenized_cells, file_cell_metadata, tokenized_counts
 
     def create_dataset(
         self,