biancaganescu committed
Commit de69cc1 · verified · 1 Parent(s): 1a1731c

Create pythia-training-metrics.py

Files changed (1): pythia-training-metrics.py (+158, -0)
pythia-training-metrics.py ADDED
@@ -0,0 +1,158 @@
+ import datasets
+ import pickle
+ 
+ _DESCRIPTION = """\
+ Dataset for storing training metrics of Pythia models.
+ """
+ 
+ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
+ 
+     MODEL_SIZES = [
+         "14m",
+     ]
+ 
+     _GRADIENTS_DESCRIPTION = """\
+     Dataset for storing gradients of Pythia models of the requested model size.
+     """
+ 
+     _WEIGHTS_DESCRIPTION = """\
+     Dataset for storing weights of Pythia models of the requested model size.
+     """
+ 
+     _GRADIENTS_MINI_DESCRIPTION = """\
+     Dataset for storing gradients of Pythia models of the requested model size,
+     limited to only 2 gradient steps per checkpoint.
+     """
+ 
+     _ACTIVATIONS_DESCRIPTION = """\
+     Dataset for storing activations of Pythia models of the requested model size.
+     """
+ 
+     # One config per (model size, metric) pair.
+     BUILDER_CONFIGS = []
+     for model_size in MODEL_SIZES:
+         BUILDER_CONFIGS.extend([
+             datasets.BuilderConfig(
+                 name=f"{model_size}__gradients",
+                 description=_GRADIENTS_DESCRIPTION,
+                 version="1.0.0",
+             ),
+             datasets.BuilderConfig(
+                 name=f"{model_size}__gradients_mini",
+                 description=_GRADIENTS_MINI_DESCRIPTION,
+                 version="1.0.0",
+             ),
+             datasets.BuilderConfig(
+                 name=f"{model_size}__activations",
+                 description=_ACTIVATIONS_DESCRIPTION,
+                 version="1.0.0",
+             ),
+             datasets.BuilderConfig(
+                 name=f"{model_size}__weights",
+                 description=_WEIGHTS_DESCRIPTION,
+                 version="1.0.0",
+             ),
+         ])
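+ 
+     # With MODEL_SIZES = ["14m"], the loop above registers the configs
+     # "14m__gradients", "14m__gradients_mini", "14m__activations" and "14m__weights".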
+ 
+     def _info(self):
+         """
+         NOTE: we could specify features explicitly, but since the features differ for
+         each model size that is awkward and largely unnecessary; HF infers them
+         automatically.
+         """
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+         )
+ 
+     def _split_generators(self, dl_manager: datasets.DownloadManager):
+         """
+         Returns the data for the requested config. The model size is encoded in the
+         config name, and all examples are emitted under a single 'default' split.
+         """
+         to_download_files = []
+ 
+         kwargs_checkpoint_steps = []
+         kwargs_gradient_steps = []
+ 
+         checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000, 2000, 3000, 4000, 4091]
+ 
+         def get_gradient_step(step: int):
+             """
+             Return the list of gradient steps that are stored for a given checkpoint step.
+             """
+             return list(range(max(0, step - 5), min(step + 6, 4091)))
+ 
+         def get_gradient_mini_step(step: int):
+             """
+             Return the list of gradient steps stored for a given checkpoint step,
+             limited to only 2 gradients per checkpoint.
+             """
+             if step != checkpoint_steps[-1]:
+                 return [step, step + 1]
+             else:
+                 return [step - 2, step - 1]
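+ 
+         # For example: get_gradient_step(4) -> [0, 1, ..., 9] (the window
+         # step-5 .. step+5, clipped at the ends), get_gradient_mini_step(4) -> [4, 5],
+         # and get_gradient_mini_step(4091) -> [4089, 4090].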
+ 
+         model_size = self.config.name.split("__")[0]
+ 
+         for checkpoint_step in checkpoint_steps:
+             directory_path = f"./models/{model_size}/checkpoint_{checkpoint_step}"
+ 
+             if "activations" in self.config.name:
+                 to_download_files.append(f"{directory_path}/checkpoint_activations.pickle")
+                 kwargs_checkpoint_steps.append(checkpoint_step)
+             elif "weights" in self.config.name:
+                 to_download_files.append(f"{directory_path}/checkpoint_weights.pickle")
+                 kwargs_checkpoint_steps.append(checkpoint_step)
+             elif "gradients" in self.config.name:
+                 if "mini" in self.config.name:
+                     gradient_steps = get_gradient_mini_step(checkpoint_step)
+                 else:
+                     gradient_steps = get_gradient_step(checkpoint_step)
+ 
+                 for gradient_step in gradient_steps:
+                     to_download_files.append(f"{directory_path}/checkpoint_gradients_{gradient_step}.pickle")
+                     kwargs_checkpoint_steps.append(checkpoint_step)
+                     kwargs_gradient_steps.append(gradient_step)
+             else:
+                 raise ValueError(f"Invalid config name: {self.config.name}")
+ 
+         downloaded_files = dl_manager.download_and_extract(to_download_files)
+ 
+         return [
+             datasets.SplitGenerator(
+                 name="default",
+                 gen_kwargs={
+                     "filepaths": downloaded_files,
+                     "checkpoint_steps": kwargs_checkpoint_steps,
+                     **({"gradient_steps": kwargs_gradient_steps} if "gradients" in self.config.name else {}),
+                 },
+             )
+         ]
+ 
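+     # Note: filepaths, checkpoint_steps and (for gradient configs) gradient_steps
+     # are parallel lists: entry i of each describes the same downloaded pickle file.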
+     def _generate_examples(self, filepaths, checkpoint_steps, **kwargs):
+ 
+         # filepaths should always be a list of file paths
+         if isinstance(filepaths, str):
+             filepaths = [filepaths]
+ 
+         if "gradients" in self.config.name:
+             gradient_steps = kwargs["gradient_steps"]
+ 
+         global_idx = 0  # unique identifier for each example
+ 
+         for idx, filepath in enumerate(filepaths):
+             with open(filepath, "rb") as f:
+                 data = pickle.load(f)
+ 
+             for layer_name, layer_data in data.items():
+                 record = {
+                     "checkpoint_step": checkpoint_steps[idx],
+                     "layer_name": layer_name,
+                     "data": layer_data,
+                 }
+                 if "gradients" in self.config.name:
+                     record["gradient_step"] = gradient_steps[idx]
+ 
+                 yield global_idx, record
+                 global_idx += 1
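
A minimal usage sketch (not part of the commit): it assumes a version of the datasets
library that still supports script-based builders (datasets 2.x; recent 2.x releases
also require trust_remote_code=True), and the repository id below is hypothetical.

    import datasets

    # "biancaganescu/pythia-training-metrics" is a guess at the hosting repo id;
    # substitute the actual repository containing this script and the pickle files.
    ds = datasets.load_dataset(
        "biancaganescu/pythia-training-metrics",
        name="14m__weights",   # config name: <model_size>__<metric>
        split="default",
        trust_remote_code=True,
    )

    # Each example carries "checkpoint_step", "layer_name" and "data"
    # (plus "gradient_step" for the gradient configs).
    print(ds[0]["layer_name"])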