Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_17-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_21-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_21-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_21-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_22-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_22-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_26-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_26-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_26-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_27-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_27-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_27-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_28-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_28-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_28-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_30-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_30-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_30-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_31-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_31-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_31-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_34-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_36-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_36-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_36-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_37-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_37-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_37-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_40-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_40-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_40-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_43-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24615022, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9377150, "hashes": {}}}], "version": 2}
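These index.json files follow the MosaicML Streaming (MDS) shard-index layout ("format": "mds", zstd-compressed shards with "id" and "input_ids" columns). As a rough, hedged illustration only, a shard folder like the one above could presumably be read with the `streaming` library; the paths below are placeholders and are not part of this commit.

```python
# Illustrative sketch only (not part of this commit): reading one MDS shard
# folder with MosaicML's `streaming` library, assuming it is installed and the
# remote/local paths are replaced with real locations.
from streaming import StreamingDataset

dataset = StreamingDataset(
    remote="train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_1-tokenized-chunked-8192-512-32-backfill-nodups",  # hypothetical location
    local="/tmp/mds_cache",  # local cache directory
    shuffle=False,
)

sample = dataset[0]
print(sample["id"])              # "id" column, encoded as str
print(sample["input_ids"][:10])  # "input_ids" column, encoded as ndarray:uint32
```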
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24738376, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9425383, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5820971, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 43, "20th": 51, "30th": 58, "40th": 67, "50th": 78, "60th": 93, "70th": 116, "80th": 155, "90th": 234, "95th": 329, "99th": 563, "100th": 4539}}
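Each stats.json appears to summarize the shard's token counts per record (above, for example, the median record is 78 tokens and the longest is 4539). Below is a toy sketch of how such a summary could be produced from a list of per-record token counts; the field names mirror the JSON, but this is illustrative and not the actual tokenization pipeline.

```python
# Toy sketch (assumption: the percentiles summarize tokens per record) of how a
# stats.json-style blob could be computed. Not the pipeline's actual code.
import json
import numpy as np

def summarize(token_counts):
    points = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99, 100]
    values = np.percentile(token_counts, points, method="lower").astype(int)
    return json.dumps({
        "total_duplicated_tokens": 0,                      # assumed 0 after dedup
        "total_tokens_written": int(np.sum(token_counts)),
        "total_tokens_skipped": 0,
        "percentiles": {f"{p}th": int(v) for p, v in zip(points, values)},
    })

print(summarize([20, 43, 78, 116, 234, 4539]))  # tiny made-up example
```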
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_17-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5810650, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 42, "20th": 50, "30th": 58, "40th": 67, "50th": 78, "60th": 94, "70th": 118, "80th": 155, "90th": 235, "95th": 329, "99th": 564, "100th": 2738}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24521993, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9313602, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5766887, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 42, "20th": 50, "30th": 58, "40th": 66, "50th": 78, "60th": 92, "70th": 116, "80th": 155, "90th": 234, "95th": 324, "99th": 554, "100th": 3319}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24249330, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9178635, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5698766, "total_tokens_skipped": 0, "percentiles": {"0th": 18, "10th": 42, "20th": 50, "30th": 58, "40th": 67, "50th": 77, "60th": 91, "70th": 114, "80th": 150, "90th": 227, "95th": 322, "99th": 545, "100th": 5439}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24422250, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9162727, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_21-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24611225, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9353328, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_21-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5776706, "total_tokens_skipped": 0, "percentiles": {"0th": 18, "10th": 42, "20th": 51, "30th": 58, "40th": 67, "50th": 78, "60th": 93, "70th": 116, "80th": 154, "90th": 232, "95th": 324, "99th": 564, "100th": 3646}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_21-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_22-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24714019, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9450731, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_22-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_26-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24607192, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9312725, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_26-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5775702, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 42, "20th": 50, "30th": 58, "40th": 66, "50th": 78, "60th": 93, "70th": 116, "80th": 153, "90th": 232, "95th": 323, "99th": 561, "100th": 3947}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_26-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_27-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24789130, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9388730, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_27-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5821144, "total_tokens_skipped": 0, "percentiles": {"0th": 18, "10th": 42, "20th": 51, "30th": 58, "40th": 67, "50th": 78, "60th": 93, "70th": 117, "80th": 156, "90th": 237, "95th": 327, "99th": 560, "100th": 3773}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_27-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_28-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24727454, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9392199, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_28-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5805758, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 43, "20th": 51, "30th": 58, "40th": 67, "50th": 79, "60th": 94, "70th": 118, "80th": 156, "90th": 234, "95th": 326, "99th": 552, "100th": 2909}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_28-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25148734, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9601251, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5923535, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 42, "20th": 51, "30th": 59, "40th": 68, "50th": 80, "60th": 95, "70th": 119, "80th": 158, "90th": 238, "95th": 331, "99th": 560, "100th": 7701}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_30-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25442238, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9722800, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_30-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5984365, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 42, "20th": 51, "30th": 59, "40th": 68, "50th": 80, "60th": 96, "70th": 120, "80th": 161, "90th": 244, "95th": 338, "99th": 586, "100th": 3014}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_30-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_31-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24811263, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9450922, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_31-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5826681, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 42, "20th": 51, "30th": 58, "40th": 67, "50th": 78, "60th": 93, "70th": 117, "80th": 155, "90th": 237, "95th": 330, "99th": 572, "100th": 4660}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_31-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_34-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5735597, "total_tokens_skipped": 0, "percentiles": {"0th": 18, "10th": 42, "20th": 50, "30th": 57, "40th": 66, "50th": 77, "60th": 92, "70th": 115, "80th": 153, "90th": 231, "95th": 325, "99th": 555, "100th": 2410}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_36-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25283276, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9617494, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_36-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5944606, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 42, "20th": 51, "30th": 58, "40th": 67, "50th": 79, "60th": 95, "70th": 120, "80th": 160, "90th": 246, "95th": 339, "99th": 576, "100th": 4484}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_36-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_37-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24724873, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9394944, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_37-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5805101, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 42, "20th": 51, "30th": 58, "40th": 67, "50th": 78, "60th": 93, "70th": 115, "80th": 153, "90th": 235, "95th": 329, "99th": 559, "100th": 4679}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_37-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25127444, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9617343, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5918174, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 43, "20th": 51, "30th": 59, "40th": 67, "50th": 79, "60th": 95, "70th": 118, "80th": 159, "90th": 243, "95th": 339, "99th": 565, "100th": 2660}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_40-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25415169, "hashes": {}}, "samples": 50008, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9610438, "hashes": {}}}], "version": 2}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_40-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 5977594, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 42, "20th": 51, "30th": 58, "40th": 67, "50th": 79, "60th": 94, "70th": 118, "80th": 157, "90th": 238, "95th": 328, "99th": 566, "100th": 8191}}
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_40-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_43-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24964386, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9480077, "hashes": {}}}], "version": 2}