rfernand committed
Commit 7f191b3 · 1 Parent(s): fb9c223

Upload basic_sentence_transforms.py

Files changed (1)
  1. basic_sentence_transforms.py +6 -2
basic_sentence_transforms.py CHANGED
@@ -169,9 +169,13 @@ class BasicSentenceTransforms(datasets.GeneratorBasedBuilder):
     VERSION = datasets.Version("0.0.18")
 
     def _info(self):
+        # features are now required here, so get them from the current CONFIG (following code example from super_glue.py)
+        features = {feature: datasets.Value("string") for feature in self.config.features}
+
         return datasets.DatasetInfo(
             description="The dataset consists of diagnostic/warm-up tasks and core tasks within this dataset." +
                 "The core tasks represent the translation of English sentences between the active, passive, and logical forms.",
+            features=datasets.Features(features),
             supervised_keys=None,
             homepage=None,
             citation=None,
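The new self.config.features lookup assumes each builder config carries a list of feature names, mirroring the super_glue.py pattern the added comment cites. A minimal sketch of that pattern follows; the config class name and the example feature names are illustrative, not taken from the actual script.

    import datasets


    class BasicSentenceTransformsConfig(datasets.BuilderConfig):
        """Hypothetical config: stores the column names for one task."""

        def __init__(self, features=None, **kwargs):
            super().__init__(**kwargs)
            # e.g. ["active", "passive"] for an active-to-passive task (illustrative)
            self.features = features or []

_info() then maps every name in that list to datasets.Value("string"), which is exactly what the added dict comprehension does.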
@@ -183,7 +187,7 @@ class BasicSentenceTransforms(datasets.GeneratorBasedBuilder):
 
         dl_dir = dl_manager.download_and_extract(url)
         task = self.config_id
-        print("task: {}, dl_dir: {}}".format(task, dl_dir))
+        print("task: {}, dl_dir: {}".format(task, dl_dir))
 
         splits = [
             SplitGenerator(name=Split.TRAIN, gen_kwargs={"data_file": os.path.join(dl_dir, "train.jsonl")}),
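Each SplitGenerator's gen_kwargs are passed straight through to _generate_examples, so the data_file path built here is what the generator opens below. The splits list is truncated in this view; a hedged sketch of how it presumably continues is shown next, where the validation/test file names are assumptions rather than something this diff confirms.

    import os
    from datasets import Split, SplitGenerator


    def build_splits(dl_dir):
        # Hypothetical completion of the truncated list above; only train.jsonl
        # is visible in the diff, the other file names are assumed.
        return [
            SplitGenerator(name=Split.TRAIN, gen_kwargs={"data_file": os.path.join(dl_dir, "train.jsonl")}),
            SplitGenerator(name=Split.VALIDATION, gen_kwargs={"data_file": os.path.join(dl_dir, "validation.jsonl")}),
            SplitGenerator(name=Split.TEST, gen_kwargs={"data_file": os.path.join(dl_dir, "test.jsonl")}),
        ]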
@@ -201,7 +205,7 @@ class BasicSentenceTransforms(datasets.GeneratorBasedBuilder):
 
     def _generate_examples(self, data_file):
         print("_generate_examples: data_file: {}".format(data_file))
-
+
         with open(data_file, encoding="utf-8") as f:
             for i, line in enumerate(f):
                 key = str(i)
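The diff cuts off inside the read loop. Below is a minimal sketch of how a generator like this typically finishes, assuming each JSONL line is a flat JSON object whose string fields match the feature names declared in _info(); the function name and feature_names parameter are illustrative, not part of the committed script.

    import json


    def generate_examples_sketch(data_file, feature_names):
        # Assumed continuation: parse each JSONL line and yield (key, example)
        # pairs whose keys are the string-valued columns declared in _info().
        with open(data_file, encoding="utf-8") as f:
            for i, line in enumerate(f):
                key = str(i)
                record = json.loads(line)
                yield key, {name: record[name] for name in feature_names}

With features now declared in DatasetInfo, the library no longer has to infer the Arrow schema from the yielded examples.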
 