iamgroot42 committed on
Commit bcc1b51 · verified · 1 Parent(s): 9f3c65a

Update mimir.py

Files changed (1)
  1. mimir.py +73 -25
mimir.py CHANGED
@@ -14,6 +14,8 @@ import os
 
 import datasets
 
+from typing import List
+
 
 _HOMEPAGE = "http://github.com/iamgroot42/mimir"
 
@@ -37,13 +39,14 @@ _DOWNLOAD_URL = "https://huggingface.co/datasets/iamgroot42/mimir/resolve/main/"
 class MimirConfig(BuilderConfig):
     """BuilderConfig for Mimir dataset."""
 
-    def __init__(self, **kwargs):
+    def __init__(self, *args, subsets: List[str]=[], **kwargs):
         """Constructs a MimirConfig.
 
         Args:
             **kwargs: keyword arguments forwarded to super.
         """
         super(MimirConfig, self).__init__(**kwargs)
+        self.subsets = subsets
 
 
 class MimirDataset(GeneratorBasedBuilder):
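Note on the new `subsets` keyword (a reader's note, not part of the commit): the default value is a mutable list literal, which Python shares across every call that omits the argument. Nothing in this script mutates it, so the committed code is safe as written; the conventional defensive form, sketched below with a hypothetical class name, defaults to None instead:

from typing import List, Optional

from datasets import BuilderConfig


class MimirConfigSketch(BuilderConfig):
    """Illustrative variant only; not the committed MimirConfig."""

    def __init__(self, subsets: Optional[List[str]] = None, **kwargs):
        super().__init__(**kwargs)
        # Normalize None to a fresh list so instances never share one default object.
        self.subsets = subsets if subsets is not None else []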
@@ -54,10 +57,53 @@ class MimirDataset(GeneratorBasedBuilder):
     BUILDER_CONFIG_CLASS = MimirConfig
     BUILDER_CONFIGS = [
         MimirConfig(
-            name="the_pile_arxiv", description="This split contains data from Arxiv"
+            name="arxiv",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's Arxiv subset at various n-gram overlap thresholds"
+        ),
+        MimirConfig(
+            name="dm_mathematics",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's DM Mathematics subset at various n-gram overlap thresholds"
+        ),
+        MimirConfig(
+            name="github",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's GitHub subset at various n-gram overlap thresholds"
+        ),
+        MimirConfig(
+            name="hackernews",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's HackerNews subset at various n-gram overlap thresholds"
+        ),
+        MimirConfig(
+            name="pile_cc",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's Pile CC subset at various n-gram overlap thresholds"
+        ),
+        MimirConfig(
+            name="pubmed_central",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's PubMed Central subset at various n-gram overlap thresholds"
         ),
         MimirConfig(
-            name="the_pile_full_pile", description="This split contains data from multiple sources in the Pile",
+            name="wikipedia_(en)",
+            subsets=["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"],
+            description="This split contains data from the Pile's Wikipedia subset at various n-gram overlap thresholds"
+        ),
+        MimirConfig(
+            name="full_pile", description="This split contains data from multiple sources in the Pile",
+        ),
+        MimirConfig(
+            name="c4", description="This split contains data the C4 dataset",
+        ),
+        MimirConfig(
+            name="temporal_arxiv",
+            subsets=["2020_08", "2021_01", "2021_06", "2022_01", "2022_06", "2023_01", "2023_06"],
+            description="This split contains benchmarks where non-members are selected from various months from 2020-08 and onwards",
+        ),
+        MimirConfig(
+            name="temporal_wiki", description="This split contains benchmarks where non-members are selected from 2023-08 and onwards",
         ),
     ]
 
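To see what these configs imply for split naming in the `_split_generators` hunk below: each config name is combined with every entry of its `subsets` list, and each resulting `{name}_{subset}` file yields member, nonmember, and two neighbor splits. A small illustrative snippet (plain Python that mirrors the naming logic in the diff; the concrete values are just an example):

# Reproduce the split-name expansion performed in _split_generators below.
name = "pile_cc"
subsets = ["ngram_7_0.2", "ngram_13_0.2", "ngram_13_0.8"]

subset_splits = [f"{name}_{s}" for s in subsets] if subsets else [name]
split_names = []
for ss in subset_splits:
    split_names += [
        f"{ss}_member",
        f"{ss}_nonmember",
        f"{ss}_member_neighbors",
        f"{ss}_nonmember_neighbors",
    ]
print(split_names[:2])  # ['pile_cc_ngram_7_0.2_member', 'pile_cc_ngram_7_0.2_nonmember']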
@@ -85,42 +131,44 @@ class MimirDataset(GeneratorBasedBuilder):
         NEIGHBOR_SUFFIX = "_neighbors_25_bert_in_place_swap"
         parent_dir = (
             "cache_100_200_10000_512"
-            if self.config.name == "the_pile_full_pile"
+            if self.config.name == "full_pile"
             else "cache_100_200_1000_512"
         )
 
-        file_paths = {
-            "member": os.path.join(parent_dir, "train", self.config.name + ".jsonl"),
-            "nonmember": os.path.join(parent_dir, "test", self.config.name + ".jsonl"),
-        }
-        # Load neighbor splits if they exist
-        # TODO: This is not correct (should be checking URL, not local file structure). Fix later
-        if os.path.exists(
-            os.path.join(
-                parent_dir,
-                "train_neighbors",
-                self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl",
-            )
-        ):
-            # Assume if train nieghbors exist, test neighbors also exist
-            file_paths["member_neighbors"] = os.path.join(
+        if len(self.config.subsets) > 0:
+            subset_splits = [f"{self.config.name}_{subset}" for subset in self.config.subsets]
+        else:
+            subset_splits = [self.config.name]
+
+        file_paths = {}
+        for subset_split in subset_splits:
+            # Add standard member and non-member paths
+            file_paths[f"{subset_split}_member"] = os.path.join(parent_dir, "train", subset_split + ".jsonl")
+            file_paths[f"{subset_split}_nonmember"] = os.path.join(parent_dir, "test", subset_split + ".jsonl")
+
+            # Load associated neighbors
+            file_paths[f"{subset_split}_member_neighbors"] = os.path.join(
                 parent_dir,
                 "train_neighbors",
-                self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl",
+                subset_split + f"{NEIGHBOR_SUFFIX}.jsonl",
             )
-            file_paths["nonmember_neighbors"] = os.path.join(
+            file_paths[f"{subset_split}_nonmember_neighbors"] = os.path.join(
                 parent_dir,
                 "test_neighbors",
-                self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl",
+                subset_split + f"{NEIGHBOR_SUFFIX}.jsonl",
            )
 
         # Now that we know which files to load, download them
-        download_paths = [_DOWNLOAD_URL + v for v in file_paths.values()]
+        download_paths = []
+        k2i = {}
+        for i, (k, v) in enumerate(file_paths.items()):
+            download_paths.append(_DOWNLOAD_URL + v)
+            k2i[k] = i
         data_dir = dl_manager.download_and_extract(download_paths)
 
         splits = []
-        for i, k in enumerate(file_paths.keys()):
-            splits.append(SplitGenerator(name=k, gen_kwargs={"file_path": data_dir[i]}))
+        for k in file_paths.keys():
+            splits.append(SplitGenerator(name=k, gen_kwargs={"file_path": data_dir[k2i[k]]}))
         return splits
 
     def _generate_examples(self, file_path):
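A note on the download bookkeeping in this hunk (an alternative sketch, not the committed code): `dl_manager.download_and_extract` also accepts a dict and returns a dict with the same keys, which would make the `k2i` index map unnecessary. A possible tail for `_split_generators` under that assumption:

# Assumes datasets.DownloadManager maps download_and_extract over dicts,
# returning local paths keyed the same way as the input.
downloaded = dl_manager.download_and_extract(
    {k: _DOWNLOAD_URL + v for k, v in file_paths.items()}
)
splits = [
    SplitGenerator(name=k, gen_kwargs={"file_path": downloaded[k]})
    for k in file_paths
]
return splits

The committed list-based version relies on `download_and_extract` preserving input order, which it does for lists, so both formulations produce the same splits.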
 
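Finally, a usage sketch (not part of the commit) for loading one of the new configurations once this revision of the script is active. The config name comes from BUILDER_CONFIGS, and the split string follows the `{config}_{subset}_{member|nonmember}` pattern built in `_split_generators` at this revision, so it may differ in later versions of the dataset; newer releases of `datasets` may also require `trust_remote_code=True` for script-based datasets:

from datasets import load_dataset

# Hypothetical example: the "pile_cc" config, 13-gram / 0.8-overlap subset, member split.
members = load_dataset(
    "iamgroot42/mimir",
    name="pile_cc",
    split="pile_cc_ngram_13_0.8_member",
)
print(members[0])

Loading without `split=` returns a DatasetDict keyed by every generated split name, which is a quick way to list the available combinations.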