orionweller committed
Commit 513a1bd (verified)
1 Parent(s): dc3ccb2

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. train/stackexchange-dolmino-dup-2/split_0-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  2. train/stackexchange-dolmino-dup-2/split_0-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  3. train/stackexchange-dolmino-dup-2/split_0-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  4. train/stackexchange-dolmino-dup-2/split_14-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  5. train/stackexchange-dolmino-dup-2/split_14-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  6. train/stackexchange-dolmino-dup-2/split_14-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  7. train/stackexchange-dolmino-dup-2/split_22-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  8. train/stackexchange-dolmino-dup-2/split_22-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  9. train/stackexchange-dolmino-dup-2/split_22-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  10. train/stackexchange-dolmino-dup-2/split_23-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  11. train/stackexchange-dolmino-dup-2/split_23-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  12. train/stackexchange-dolmino-dup-2/split_23-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  13. train/stackexchange-dolmino-dup-2/split_24-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  14. train/stackexchange-dolmino-dup-2/split_24-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  15. train/stackexchange-dolmino-dup-2/split_24-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  16. train/stackexchange-dolmino-dup-2/split_29-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds +3 -0
  17. train/stackexchange-dolmino-dup-2/split_30-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  18. train/stackexchange-dolmino-dup-2/split_30-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  19. train/stackexchange-dolmino-dup-2/split_30-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  20. train/stackexchange-dolmino-dup-2/split_31-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  21. train/stackexchange-dolmino-dup-2/split_31-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  22. train/stackexchange-dolmino-dup-2/split_31-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  23. train/stackexchange-dolmino-dup-2/split_32-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  24. train/stackexchange-dolmino-dup-2/split_32-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  25. train/stackexchange-dolmino-dup-2/split_37-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  26. train/stackexchange-dolmino-dup-2/split_37-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  27. train/stackexchange-dolmino-dup-2/split_37-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  28. train/stackexchange-dolmino-dup-2/split_43-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  29. train/stackexchange-dolmino-dup-2/split_43-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  30. train/stackexchange-dolmino-dup-2/split_43-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  31. train/stackexchange-dolmino-dup-2/split_46-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  32. train/stackexchange-dolmino-dup-2/split_46-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  33. train/stackexchange-dolmino-dup-2/split_46-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  34. train/stackexchange-dolmino-dup-2/split_47-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  35. train/stackexchange-dolmino-dup-2/split_47-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  36. train/stackexchange-dolmino-dup-2/split_47-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  37. train/stackexchange-dolmino-dup-2/split_5-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  38. train/stackexchange-dolmino-dup-2/split_5-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  39. train/stackexchange-dolmino-dup-2/split_5-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  40. train/stackexchange-dolmino-dup-2/split_53-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  41. train/stackexchange-dolmino-dup-2/split_53-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  42. train/stackexchange-dolmino-dup-2/split_53-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  43. train/stackexchange-dolmino-dup-2/split_59-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  44. train/stackexchange-dolmino-dup-2/split_59-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  45. train/stackexchange-dolmino-dup-2/split_59-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  46. train/stackexchange-dolmino-dup-2/split_62-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  47. train/stackexchange-dolmino-dup-2/split_62-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
  48. train/stackexchange-dolmino-dup-2/split_62-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
  49. train/stackexchange-dolmino-dup-2/split_65-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
  50. train/stackexchange-dolmino-dup-2/split_65-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
train/stackexchange-dolmino-dup-2/split_0-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 34544411, "hashes": {}}, "samples": 30013, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 21020557, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_0-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16525372, "total_tokens_skipped": 0, "percentiles": {"0th": 35, "10th": 166, "20th": 225, "30th": 279, "40th": 335, "50th": 402, "60th": 483, "70th": 588, "80th": 745, "90th": 1048, "95th": 1423, "99th": 2712, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_0-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_14-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 34372834, "hashes": {}}, "samples": 30012, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 21056108, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_14-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16437122, "total_tokens_skipped": 112, "percentiles": {"0th": 35, "10th": 172, "20th": 230, "30th": 286, "40th": 342, "50th": 407, "60th": 486, "70th": 589, "80th": 742, "90th": 1025, "95th": 1392, "99th": 2634, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_14-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_22-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35369091, "hashes": {}}, "samples": 30014, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20560406, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_22-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16943271, "total_tokens_skipped": 55, "percentiles": {"0th": 40, "10th": 169, "20th": 227, "30th": 282, "40th": 340, "50th": 405, "60th": 487, "70th": 594, "80th": 755, "90th": 1084, "95th": 1478, "99th": 2911, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_22-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_23-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35435349, "hashes": {}}, "samples": 30011, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20757607, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_23-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16976477, "total_tokens_skipped": 89, "percentiles": {"0th": 37, "10th": 172, "20th": 231, "30th": 284, "40th": 343, "50th": 410, "60th": 492, "70th": 601, "80th": 759, "90th": 1077, "95th": 1467, "99th": 2894, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_23-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_24-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35519214, "hashes": {}}, "samples": 30015, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20545735, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_24-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 17017390, "total_tokens_skipped": 122, "percentiles": {"0th": 35, "10th": 172, "20th": 229, "30th": 284, "40th": 345, "50th": 416, "60th": 496, "70th": 606, "80th": 772, "90th": 1077, "95th": 1455, "99th": 2784, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_24-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_29-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20a72d28471c1b9fa865bfcedd4edb99df51a2223a3d0e20992c19e6f982d501
+ size 35819497
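
The shard.00000.mds binaries are tracked with Git LFS, so the diff above shows only the three-line pointer file (spec version, sha256 oid, and byte size) rather than the shard contents. A minimal sketch of reading such a pointer; parse_lfs_pointer is an illustrative helper, not part of any tooling in this repository:

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line has the form "<key> <value>"; split on the first space.
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:20a72d28471c1b9fa865bfcedd4edb99df51a2223a3d0e20992c19e6f982d501\n"
    "size 35819497\n"
)

info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # digest and size (bytes) of the real shard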
train/stackexchange-dolmino-dup-2/split_30-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 34856717, "hashes": {}}, "samples": 30012, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20284162, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_30-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16690851, "total_tokens_skipped": 0, "percentiles": {"0th": 31, "10th": 173, "20th": 231, "30th": 285, "40th": 343, "50th": 408, "60th": 486, "70th": 590, "80th": 749, "90th": 1058, "95th": 1433, "99th": 2735, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_30-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_31-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 34889145, "hashes": {}}, "samples": 30013, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20521812, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_31-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16705303, "total_tokens_skipped": 3, "percentiles": {"0th": 38, "10th": 170, "20th": 229, "30th": 284, "40th": 342, "50th": 409, "60th": 491, "70th": 597, "80th": 757, "90th": 1059, "95th": 1437, "99th": 2681, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_31-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_32-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 17949158, "total_tokens_skipped": 11, "percentiles": {"0th": 30, "10th": 180, "20th": 241, "30th": 298, "40th": 358, "50th": 427, "60th": 513, "70th": 624, "80th": 793, "90th": 1137, "95th": 1548, "99th": 3155, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_32-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_37-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 37807649, "hashes": {}}, "samples": 30016, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 22718353, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_37-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 18149064, "total_tokens_skipped": 0, "percentiles": {"0th": 31, "10th": 183, "20th": 245, "30th": 304, "40th": 366, "50th": 436, "60th": 524, "70th": 636, "80th": 809, "90th": 1152, "95th": 1596, "99th": 3070, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_37-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_43-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 36617998, "hashes": {}}, "samples": 30014, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 21780959, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_43-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 17560514, "total_tokens_skipped": 267, "percentiles": {"0th": 37, "10th": 181, "20th": 242, "30th": 297, "40th": 357, "50th": 424, "60th": 506, "70th": 615, "80th": 781, "90th": 1116, "95th": 1535, "99th": 2947, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_43-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_46-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 36420954, "hashes": {}}, "samples": 30022, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 22024261, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_46-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 17460188, "total_tokens_skipped": 122, "percentiles": {"0th": 34, "10th": 180, "20th": 239, "30th": 295, "40th": 356, "50th": 421, "60th": 505, "70th": 613, "80th": 782, "90th": 1108, "95th": 1512, "99th": 2885, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_46-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_47-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 22585722, "hashes": {}}, "samples": 18498, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13056255, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_47-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 10834541, "total_tokens_skipped": 54, "percentiles": {"0th": 42, "10th": 179, "20th": 239, "30th": 299, "40th": 359, "50th": 425, "60th": 509, "70th": 620, "80th": 784, "90th": 1109, "95th": 1543, "99th": 2970, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_47-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_5-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35402449, "hashes": {}}, "samples": 30021, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 21655513, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_5-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16953102, "total_tokens_skipped": 82, "percentiles": {"0th": 33, "10th": 174, "20th": 234, "30th": 289, "40th": 346, "50th": 413, "60th": 496, "70th": 603, "80th": 760, "90th": 1058, "95th": 1458, "99th": 2811, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_5-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_53-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 34100278, "hashes": {}}, "samples": 30006, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20191890, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_53-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16307254, "total_tokens_skipped": 42, "percentiles": {"0th": 38, "10th": 169, "20th": 223, "30th": 277, "40th": 332, "50th": 396, "60th": 473, "70th": 578, "80th": 734, "90th": 1038, "95th": 1424, "99th": 2674, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_53-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_59-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35641787, "hashes": {}}, "samples": 30019, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 21220777, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_59-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 17079632, "total_tokens_skipped": 115, "percentiles": {"0th": 31, "10th": 171, "20th": 229, "30th": 285, "40th": 344, "50th": 410, "60th": 491, "70th": 601, "80th": 762, "90th": 1088, "95th": 1483, "99th": 2898, "100th": 8000}}
train/stackexchange-dolmino-dup-2/split_59-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_62-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 34016705, "hashes": {}}, "samples": 30007, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20188573, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_62-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16267336, "total_tokens_skipped": 0, "percentiles": {"0th": 32, "10th": 168, "20th": 223, "30th": 277, "40th": 335, "50th": 399, "60th": 477, "70th": 587, "80th": 741, "90th": 1035, "95th": 1383, "99th": 2559, "100th": 7999}}
train/stackexchange-dolmino-dup-2/split_62-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/stackexchange-dolmino-dup-2/split_65-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35166104, "hashes": {}}, "samples": 30016, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20387070, "hashes": {}}}], "version": 2}
train/stackexchange-dolmino-dup-2/split_65-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16842016, "total_tokens_skipped": 3, "percentiles": {"0th": 38, "10th": 171, "20th": 228, "30th": 282, "40th": 339, "50th": 407, "60th": 485, "70th": 589, "80th": 748, "90th": 1057, "95th": 1476, "99th": 2861, "100th": 8000}}