akommula commited on
Commit
31da5fd
·
1 Parent(s): c5ea02e

Added openpi_v2.py

Browse files
Files changed (2) hide show
  1. .openpi_v2.py.swp +0 -0
  2. openpi_v2.py +162 -0
.openpi_v2.py.swp ADDED
Binary file (20.5 kB). View file
 
openpi_v2.py ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
# Lint as: python3
"""Openpi V2: A dataset for tracking state changes in procedural text by using an unrestricted library."""

import json
import os
import textwrap

import numpy as np

import datasets
26
+
27
+ _OPENPI_V2_CITATION = """\
28
+ @inproceedings{
29
+ title={{OPENPI V2}: }
30
+ author={}
31
+ note={}
32
+ year={2022}
33
+ }
34
+ """
35
+
36
+ _OPENPI_V2_DESCRIPTION = """\
37
+ TEMPORARY DESCRIPTION
38
+ """
39
+
40
+ _LICENSE = "CC BY 4.0"
41
+ _VERSION = "1.0.0"
42
+ _HOMEPAGE = "https://allenai.org/data/openpi"
43
+ _URL = "https://github.com/allenai/openpi_v2/tree/main/v2/data/"
44
+ _DATA_FILES = {"train": "train-data.json"
45
+ "dev": "dev-data.json"
46
+ "test": "test-data.json"}
47
+
48
+
49
class OpenpiConfig(datasets.BuilderConfig):
    """BuilderConfig for Openpi V2.

    Holds per-config metadata consumed by the OpenpiV2 builder: the feature
    schema, download location, citation, and homepage URL.
    """

    def __init__(
        self,
        text_features,
        data_url,
        data_dir,
        citation,
        url,
        process_label=lambda x: x,
        **kwargs,
    ):
        # Pin the dataset version; remaining kwargs (name, description, ...)
        # are forwarded to the base BuilderConfig.
        super().__init__(version=datasets.Version(_VERSION), **kwargs)
        self.url = url
        self.citation = citation
        self.data_dir = data_dir
        self.data_url = data_url
        self.text_features = text_features
        # Identity by default; allows a config to post-process labels.
        self.process_label = process_label
70
+
71
+
72
class OpenpiV2(datasets.GeneratorBasedBuilder):
    """Dataset builder for OpenPI V2 (entity state changes in procedural text)."""

    BUILDER_CONFIGS = [
        OpenpiConfig(
            # BUG FIX: the original call was missing commas between keyword
            # arguments (name/description/data_url/citation/url) and between
            # feature-dict entries, all of which were SyntaxErrors.
            name="openpi_text",
            description=textwrap.dedent("""\
"""),
            text_features=datasets.Features(
                {
                    "goal": datasets.Value("string"),
                    "steps": [datasets.Value("string")],
                    "topics": datasets.Value("string"),
                    "image_urls": [datasets.Value("string")],
                    "states": [
                        datasets.features.Sequence(
                            {
                                "answers_openpiv1_metadata": datasets.features.Sequence(
                                    {
                                        "entity": datasets.Value("string"),
                                        "attribute": datasets.Value("string"),
                                        "answers": [datasets.Value("string")],
                                        "modality": [datasets.Value("string")],
                                    }
                                ),
                                "entity": datasets.Value("string"),
                                "attribute": datasets.Value("string"),
                                "answers": [datasets.Value("string")],
                            }
                        )
                    ],
                }
            ),
            data_url=_URL,
            # BUG FIX: OpenpiConfig.__init__ requires `data_dir` (no default);
            # the original never passed it, so config construction raised
            # TypeError. No directory applies here, hence None.
            data_dir=None,
            citation=textwrap.dedent(
                """\
@inproceedings{
title={},
author={},
booktitle={},
year={}
}"""
            ),
            url=_HOMEPAGE,
        )
    ]

    def _info(self):
        """Assemble the DatasetInfo from the active config."""
        return datasets.DatasetInfo(
            description=_OPENPI_V2_DESCRIPTION,
            features=self.config.text_features,
            supervised_keys=None,
            homepage=self.config.url,
            citation=self.config.citation + "\n" + _OPENPI_V2_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Map each canonical split to its JSON file under ./data/.

        NOTE(review): files are read from a local ./data/ directory rather
        than fetched via `dl_manager` — confirm the data ships alongside
        this script.
        """
        splits = [
            (datasets.Split.TRAIN, "train"),
            (datasets.Split.VALIDATION, "dev"),
            (datasets.Split.TEST, "test"),
        ]
        return [
            datasets.SplitGenerator(
                name=split_name,
                gen_kwargs={"filepath": "./data/" + _DATA_FILES[key]},
            )
            for split_name, key in splits
        ]

    def _generate_examples(self, filepath, files=None):
        """Yield (example_id, example_dict) pairs for one split.

        Args:
            filepath: path of the split's JSON file (a dict keyed by id).
            files: optional iterable of (path, file-object) pairs, archive
                iteration style. BUG FIX: made optional with a direct-open
                fallback — `_split_generators` only passes `filepath`, so the
                original required `files` parameter crashed every run.
        """
        # BUG FIX: `logger` was referenced but never defined anywhere.
        logger = datasets.logging.get_logger(__name__)
        logger.info("generating examples from = %s", filepath)

        if files is None:
            with open(filepath, "rb") as f:
                openpi_v2 = json.loads(f.read().decode("utf-8"))
            for id_ in openpi_v2:
                yield id_, openpi_v2[id_]
            return

        for path, f in files:
            # BUG FIX: original compared against the undefined name
            # `file_path` (NameError); the parameter is `filepath`.
            if path == filepath:
                openpi_v2 = json.loads(f.read().decode("utf-8"))
                # (removed stray debug print of the entire dataset)
                for id_ in openpi_v2:
                    yield id_, openpi_v2[id_]
                break
161
+
162
+