asigalov61 committed on
Commit
e2fb2a2
1 Parent(s): bb5d53f

Upload 4 files

MIDI_Images_Solo_Piano_Dataset_Maker.ipynb ADDED
@@ -0,0 +1,492 @@
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "private_outputs": true,
7
+ "provenance": []
8
+ },
9
+ "kernelspec": {
10
+ "name": "python3",
11
+ "display_name": "Python 3"
12
+ },
13
+ "language_info": {
14
+ "name": "python"
15
+ }
16
+ },
17
+ "cells": [
18
+ {
19
+ "cell_type": "markdown",
20
+ "source": [
21
+ "# MIDI Images Solo Piano Dataset Maker (ver. 1.0)\n",
22
+ "\n",
23
+ "***\n",
24
+ "\n",
25
+ "Powered by tegridy-tools: https://github.com/asigalov61/tegridy-tools\n",
26
+ "\n",
27
+ "***\n",
28
+ "\n",
29
+ "#### Project Los Angeles\n",
30
+ "\n",
31
+ "#### Tegridy Code 2024\n",
32
+ "\n",
33
+ "***"
34
+ ],
35
+ "metadata": {
36
+ "id": "LUgrspEA-68o"
37
+ }
38
+ },
39
+ {
40
+ "cell_type": "markdown",
41
+ "source": [
42
+ "# (SETUP ENVIRONMENT)"
43
+ ],
44
+ "metadata": {
45
+ "id": "7N-KXNgQ_a0h"
46
+ }
47
+ },
48
+ {
49
+ "cell_type": "code",
50
+ "execution_count": null,
51
+ "metadata": {
52
+ "id": "pxNxlyfZ8hCg",
53
+ "cellView": "form"
54
+ },
55
+ "outputs": [],
56
+ "source": [
57
+ "# @title Install dependecies\n",
58
+ "!git clone --depth 1 https://github.com/asigalov61/tegridy-tools"
59
+ ]
60
+ },
61
+ {
62
+ "cell_type": "code",
63
+ "source": [
64
+ "#@title Import all needed modules\n",
65
+ "\n",
66
+ "print('=' * 70)\n",
67
+ "print('Loading core modules...')\n",
68
+ "print('Please wait...')\n",
69
+ "print('=' * 70)\n",
70
+ "\n",
71
+ "import os\n",
72
+ "import copy\n",
73
+ "import math\n",
74
+ "import statistics\n",
75
+ "import random\n",
76
+ "import pickle\n",
77
+ "import shutil\n",
78
+ "from itertools import groupby\n",
79
+ "from collections import Counter\n",
80
+ "from sklearn.metrics import pairwise_distances\n",
81
+ "from sklearn import metrics\n",
82
+ "from joblib import Parallel, delayed, parallel_config\n",
83
+ "import numpy as np\n",
84
+ "from tqdm import tqdm\n",
85
+ "from PIL import Image\n",
86
+ "import matplotlib.pyplot as plt\n",
87
+ "\n",
88
+ "print('Done!')\n",
89
+ "print('=' * 70)\n",
90
+ "print('Creating I/O dirs...')\n",
91
+ "\n",
92
+ "if not os.path.exists('/content/Dataset'):\n",
93
+ " os.makedirs('/content/Dataset')\n",
94
+ "\n",
95
+ "print('Done!')\n",
96
+ "print('=' * 70)\n",
97
+ "print('Loading tegridy-tools modules...')\n",
98
+ "print('=' * 70)\n",
99
+ "\n",
100
+ "%cd /content/tegridy-tools/tegridy-tools\n",
101
+ "\n",
102
+ "import TMIDIX\n",
103
+ "import TMELODIES\n",
104
+ "import TPLOTS\n",
105
+ "import HaystackSearch\n",
106
+ "\n",
107
+ "%cd /content/\n",
108
+ "\n",
109
+ "print('=' * 70)\n",
110
+ "print('Done!')\n",
111
+ "print('=' * 70)"
112
+ ],
113
+ "metadata": {
114
+ "id": "OblKfMMT8rfM",
115
+ "cellView": "form"
116
+ },
117
+ "execution_count": null,
118
+ "outputs": []
119
+ },
120
+ {
121
+ "cell_type": "markdown",
122
+ "source": [
123
+ "# (DOWNLOAD SAMPLE MIDI DATASET)"
124
+ ],
125
+ "metadata": {
126
+ "id": "gUXM7WsN_ioe"
127
+ }
128
+ },
129
+ {
130
+ "cell_type": "code",
131
+ "source": [
132
+ "# @title Download sample MIDI dataset (POP909)\n",
133
+ "%cd /content/Dataset/\n",
134
+ "!git clone --depth 1 https://github.com/music-x-lab/POP909-Dataset\n",
135
+ "%cd /content/"
136
+ ],
137
+ "metadata": {
138
+ "id": "JLm4OmOUYlEK",
139
+ "cellView": "form"
140
+ },
141
+ "execution_count": null,
142
+ "outputs": []
143
+ },
144
+ {
145
+ "cell_type": "code",
146
+ "source": [
147
+ "#@title Save file list\n",
148
+ "###########\n",
149
+ "\n",
150
+ "print('=' * 70)\n",
151
+ "print('Loading MIDI files...')\n",
152
+ "print('This may take a while on a large dataset in particular...')\n",
153
+ "\n",
154
+ "dataset_addr = '/content/Dataset/'\n",
155
+ "\n",
156
+ "# os.chdir(dataset_addr)\n",
157
+ "filez = list()\n",
158
+ "for (dirpath, dirnames, filenames) in os.walk(dataset_addr):\n",
159
+ " filez += [os.path.join(dirpath, file) for file in filenames if file.endswith('.mid') or file.endswith('.midi') or file.endswith('.kar')]\n",
160
+ "print('=' * 70)\n",
161
+ "\n",
162
+ "if filez == []:\n",
163
+ " print('Could not find any MIDI files. Please check Dataset dir...')\n",
164
+ " print('=' * 70)\n",
165
+ "\n",
166
+ "print('Randomizing file list...')\n",
167
+ "random.shuffle(filez)\n",
168
+ "print('Done!')\n",
169
+ "print('=' * 70)\n",
170
+ "print('Total found MIDI files:', len(filez))\n",
171
+ "print('=' * 70)\n",
172
+ "\n",
173
+ "TMIDIX.Tegridy_Any_Pickle_File_Writer(filez, 'filez')\n",
174
+ "\n",
175
+ "print('=' * 70)"
176
+ ],
177
+ "metadata": {
178
+ "cellView": "form",
179
+ "id": "AJrFrZ9grhMM"
180
+ },
181
+ "execution_count": null,
182
+ "outputs": []
183
+ },
184
+ {
185
+ "cell_type": "markdown",
186
+ "source": [
187
+ "# (LOAD TMIDIX MIDI PROCESSOR)"
188
+ ],
189
+ "metadata": {
190
+ "id": "RJeTdierAbeF"
191
+ }
192
+ },
193
+ {
194
+ "cell_type": "code",
195
+ "source": [
196
+ "#@title Load TMIDIX MIDI processor\n",
197
+ "\n",
198
+ "print('=' * 70)\n",
199
+ "print('TMIDIX MIDI Processor')\n",
200
+ "print('=' * 70)\n",
201
+ "print('Loading...')\n",
202
+ "\n",
203
+ "###########\n",
204
+ "\n",
205
+ "def TMIDIX_MIDI_Processor(midi_file):\n",
206
+ "\n",
207
+ " fn = os.path.basename(midi_file)\n",
208
+ " fn1 = fn.split('.mid')[0]\n",
209
+ "\n",
210
+ " try:\n",
211
+ "\n",
212
+ " #=======================================================\n",
213
+ " # START PROCESSING\n",
214
+ "\n",
215
+ " raw_score = TMIDIX.midi2single_track_ms_score(midi_file)\n",
216
+ "\n",
217
+ " escore_notes = TMIDIX.advanced_score_processor(raw_score, return_enhanced_score_notes=True)[0]\n",
218
+ "\n",
219
+ " escore_notes = TMIDIX.augment_enhanced_score_notes(escore_notes, timings_divider=256)\n",
220
+ "\n",
221
+ " sp_escore_notes = TMIDIX.recalculate_score_timings(TMIDIX.solo_piano_escore_notes(escore_notes, keep_drums=False))\n",
222
+ "\n",
223
+ " if sp_escore_notes:\n",
224
+ "\n",
225
+ " bmatrix = TMIDIX.escore_notes_to_binary_matrix(sp_escore_notes)\n",
226
+ "\n",
227
+ " return [fn1, bmatrix]\n",
228
+ "\n",
229
+ " else:\n",
230
+ " return [fn1, []]\n",
231
+ "\n",
232
+ " #=======================================================\n",
233
+ "\n",
234
+ " except Exception as ex:\n",
235
+ " print('WARNING !!!')\n",
236
+ " print('=' * 70)\n",
237
+ " print('Bad MIDI:', midi_file)\n",
238
+ " print('Error detected:', ex)\n",
239
+ " print('=' * 70)\n",
240
+ " return None\n",
241
+ "\n",
242
+ "print('Done!')\n",
243
+ "print('=' * 70)"
244
+ ],
245
+ "metadata": {
246
+ "cellView": "form",
247
+ "id": "fBbIiUWSZA5y"
248
+ },
249
+ "execution_count": null,
250
+ "outputs": []
251
+ },
252
+ {
253
+ "cell_type": "markdown",
254
+ "source": [
255
+ "# (PROCESS MIDIs)"
256
+ ],
257
+ "metadata": {
258
+ "id": "R3QxQN6OA_jX"
259
+ }
260
+ },
261
+ {
262
+ "cell_type": "code",
263
+ "source": [
264
+ "#@title Process MIDIs with TMIDIX MIDI processor\n",
265
+ "output_folder = \"/content/MIDI-Images/\" # @param {\"type\":\"string\"}\n",
266
+ "\n",
267
+ "NUMBER_OF_PARALLEL_JOBS = 4 # Number of parallel jobs\n",
268
+ "NUMBER_OF_FILES_PER_ITERATION = 4 # Number of files to queue for each parallel iteration\n",
269
+ "SAVE_EVERY_NUMBER_OF_ITERATIONS = 128 # Save every 2560 files\n",
270
+ "\n",
271
+ "print('=' * 70)\n",
272
+ "print('TMIDIX MIDI Processor')\n",
273
+ "print('=' * 70)\n",
274
+ "print('Starting up...')\n",
275
+ "print('=' * 70)\n",
276
+ "\n",
277
+ "###########\n",
278
+ "\n",
279
+ "melody_chords_f = []\n",
280
+ "\n",
281
+ "files_count = 0\n",
282
+ "\n",
283
+ "print('Processing MIDI files...')\n",
284
+ "print('Please wait...')\n",
285
+ "print('=' * 70)\n",
286
+ "\n",
287
+ "for i in tqdm(range(0, len(filez), NUMBER_OF_FILES_PER_ITERATION)):\n",
288
+ "\n",
289
+ " with parallel_config(backend='threading', n_jobs=NUMBER_OF_PARALLEL_JOBS, verbose = 0):\n",
290
+ "\n",
291
+ " output = Parallel(n_jobs=NUMBER_OF_PARALLEL_JOBS, verbose=0)(delayed(TMIDIX_MIDI_Processor)(f) for f in filez[i:i+NUMBER_OF_FILES_PER_ITERATION])\n",
292
+ "\n",
293
+ " for o in output:\n",
294
+ "\n",
295
+ " if o is not None:\n",
296
+ " melody_chords_f.append(o)\n",
297
+ "\n",
298
+ " if i % (NUMBER_OF_FILES_PER_ITERATION * SAVE_EVERY_NUMBER_OF_ITERATIONS) == 0 and i != 0:\n",
299
+ "\n",
300
+ " print('SAVING !!!')\n",
301
+ " print('=' * 70)\n",
302
+ " print('Saving processed files...')\n",
303
+ " files_count += len(melody_chords_f)\n",
304
+ " print('=' * 70)\n",
305
+ " print('Processed so far:', files_count, 'out of', len(filez), '===', files_count / len(filez), 'good files ratio')\n",
306
+ " print('=' * 70)\n",
307
+ " print('Writing images...')\n",
308
+ " print('Please wait...')\n",
309
+ "\n",
310
+ " for mat in melody_chords_f:\n",
311
+ "\n",
312
+ " if mat[1]:\n",
313
+ "\n",
314
+ " TPLOTS.binary_matrix_to_images(mat[1],\n",
315
+ " 128,\n",
316
+ " 32,\n",
317
+ " output_folder=output_folder+str(mat[0])+'/',\n",
318
+ " output_img_prefix=str(mat[0]),\n",
319
+ " output_img_ext='.png',\n",
320
+ " verbose=False\n",
321
+ " )\n",
322
+ "\n",
323
+ " print('Done!')\n",
324
+ " print('=' * 70)\n",
325
+ " melody_chords_f = []\n",
326
+ "\n",
327
+ "print('SAVING !!!')\n",
328
+ "print('=' * 70)\n",
329
+ "print('Saving processed files...')\n",
330
+ "files_count += len(melody_chords_f)\n",
331
+ "print('=' * 70)\n",
332
+ "print('Processed so far:', files_count, 'out of', len(filez), '===', files_count / len(filez), 'good files ratio')\n",
333
+ "print('=' * 70)\n",
334
+ "print('Writing images...')\n",
335
+ "print('Please wait...')\n",
336
+ "\n",
337
+ "for mat in melody_chords_f:\n",
338
+ "\n",
339
+ " if mat[1]:\n",
340
+ "\n",
341
+ " TPLOTS.binary_matrix_to_images(mat[1],\n",
342
+ " 128,\n",
343
+ " 32,\n",
344
+ " output_folder=output_folder+str(mat[0])+'/',\n",
345
+ " output_img_prefix=str(mat[0]),\n",
346
+ " output_img_ext='.png',\n",
347
+ " verbose=False\n",
348
+ " )\n",
349
+ "\n",
350
+ "print('Done!')\n",
351
+ "print('=' * 70)"
352
+ ],
353
+ "metadata": {
354
+ "cellView": "form",
355
+ "id": "15y4uzSOZX52"
356
+ },
357
+ "execution_count": null,
358
+ "outputs": []
359
+ },
360
+ {
361
+ "cell_type": "markdown",
362
+ "source": [
363
+ "# (LOAD IMAGES)"
364
+ ],
365
+ "metadata": {
366
+ "id": "GtejvUFAFocZ"
367
+ }
368
+ },
369
+ {
370
+ "cell_type": "code",
371
+ "source": [
372
+ "#@title Load created MIDI images\n",
373
+ "full_path_to_metadata_pickle_files = \"/content/MIDI-Images\" #@param {type:\"string\"}\n",
374
+ "\n",
375
+ "print('=' * 70)\n",
376
+ "print('MIDI Images Reader')\n",
377
+ "print('=' * 70)\n",
378
+ "print('Searching for images...')\n",
379
+ "\n",
380
+ "filez = list()\n",
381
+ "for (dirpath, dirnames, filenames) in os.walk(full_path_to_metadata_pickle_files):\n",
382
+ " filez += [os.path.join(dirpath, file) for file in filenames if file.endswith('.png')]\n",
383
+ "print('=' * 70)\n",
384
+ "\n",
385
+ "filez.sort()\n",
386
+ "\n",
387
+ "print('Found', len(filez), 'images!')\n",
388
+ "print('=' * 70)\n",
389
+ "print('Reading images...')\n",
390
+ "print('Please wait...')\n",
391
+ "print('=' * 70)\n",
392
+ "\n",
393
+ "fidx = 0\n",
394
+ "\n",
395
+ "all_read_images = []\n",
396
+ "\n",
397
+ "for img in tqdm(filez):\n",
398
+ "\n",
399
+ " img = Image.open(img)\n",
400
+ "\n",
401
+ " img_arr = np.array(img).tolist()\n",
402
+ "\n",
403
+ " all_read_images.append(img_arr)\n",
404
+ "\n",
405
+ " fidx += 1\n",
406
+ "\n",
407
+ "print('Done!')\n",
408
+ "print('=' * 70)\n",
409
+ "print('Loaded', fidx, 'images!')\n",
410
+ "print('=' * 70)\n",
411
+ "print('Done!')\n",
412
+ "print('=' * 70)"
413
+ ],
414
+ "metadata": {
415
+ "cellView": "form",
416
+ "id": "cXpLWHG1dBB3"
417
+ },
418
+ "execution_count": null,
419
+ "outputs": []
420
+ },
421
+ {
422
+ "cell_type": "markdown",
423
+ "source": [
424
+ "# (TEST IMAGES)"
425
+ ],
426
+ "metadata": {
427
+ "id": "qbClHSmhB1NF"
428
+ }
429
+ },
430
+ {
431
+ "cell_type": "code",
432
+ "source": [
433
+ "# @title Test created MIDI images\n",
434
+ "\n",
435
+ "print('=' * 70)\n",
436
+ "\n",
437
+ "image = random.choice(all_read_images)\n",
438
+ "\n",
439
+ "escore = TMIDIX.binary_matrix_to_original_escore_notes(image)\n",
440
+ "\n",
441
+ "output_score, patches, overflow_patches = TMIDIX.patch_enhanced_score_notes(escore)\n",
442
+ "\n",
443
+ "detailed_stats = TMIDIX.Tegridy_ms_SONG_to_MIDI_Converter(output_score,\n",
444
+ " output_signature = 'MIDI Images',\n",
445
+ " output_file_name = '/content/MIDI-Images-Composition',\n",
446
+ " track_name='Project Los Angeles',\n",
447
+ " list_of_MIDI_patches=patches,\n",
448
+ " timings_multiplier=256\n",
449
+ " )\n",
450
+ "\n",
451
+ "print('=' * 70)"
452
+ ],
453
+ "metadata": {
454
+ "id": "nrPDM1VQdKES",
455
+ "cellView": "form"
456
+ },
457
+ "execution_count": null,
458
+ "outputs": []
459
+ },
460
+ {
461
+ "cell_type": "markdown",
462
+ "source": [
463
+ "# (ZIP IMAGES)"
464
+ ],
465
+ "metadata": {
466
+ "id": "sIq55gvPCgJh"
467
+ }
468
+ },
469
+ {
470
+ "cell_type": "code",
471
+ "source": [
472
+ "# @title Zip created MIDI images\n",
473
+ "!zip -9 -r POP909_MIDI_Images_128_128_32_BW.zip MIDI-Images/ > /dev/null"
474
+ ],
475
+ "metadata": {
476
+ "id": "tVe0REKSqJeV",
477
+ "cellView": "form"
478
+ },
479
+ "execution_count": null,
480
+ "outputs": []
481
+ },
482
+ {
483
+ "cell_type": "markdown",
484
+ "source": [
485
+ "# Congrats! You did it! :)"
486
+ ],
487
+ "metadata": {
488
+ "id": "iDdMYg4haGFn"
489
+ }
490
+ }
491
+ ]
492
+ }
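Note on the image slicing used in the (PROCESS MIDIs) cell above: the two positional arguments passed to TPLOTS.binary_matrix_to_images (128 and 32) are the window step and the overlap stride, so each solo-piano binary matrix is cut into 128x128 windows taken every 32 time steps, the final window is zero-padded, and each window is saved as a 1-bit PNG. A minimal NumPy-only sketch of that windowing, using a hypothetical toy piano-roll in place of real TMIDIX output:

```python
import numpy as np
from PIL import Image

def slice_piano_roll(matrix, step=128, overlap=32):
    """Cut a (time_steps x 128) binary piano-roll into overlapping step x 128 windows."""
    matrix = np.array(matrix, dtype=np.uint8)
    windows = []
    for i in range(0, max(1, matrix.shape[0]), overlap):   # new window every `overlap` rows
        sub = matrix[i:i + step, :]
        if sub.shape[0] < step:                             # zero-pad the final window
            pad = np.zeros((step - sub.shape[0], matrix.shape[1]), dtype=np.uint8)
            sub = np.vstack((sub, pad))
        windows.append(sub)
    return windows

# Hypothetical toy piano-roll: 300 time steps x 128 MIDI pitches
roll = (np.random.rand(300, 128) > 0.95).astype(np.uint8)

for idx, w in enumerate(slice_piano_roll(roll)):
    # 1-bit (black & white) PNG, same as the notebook's output images
    Image.fromarray(w * 255).convert('1').save(f'window_{idx:07d}.png')
```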
TMIDIX.py ADDED
The diff for this file is too large to render. See raw diff
 
TPLOTS.py ADDED
@@ -0,0 +1,1205 @@
1
+ #! /usr/bin/python3
2
+
3
+ r'''############################################################################
4
+ ################################################################################
5
+ #
6
+ #
7
+ # Tegridy Plots Python Module (TPLOTS)
8
+ # Version 1.0
9
+ #
10
+ # Project Los Angeles
11
+ #
12
+ # Tegridy Code 2024
13
+ #
14
+ # https://github.com/asigalov61/tegridy-tools
15
+ #
16
+ #
17
+ ################################################################################
18
+ #
19
+ # Copyright 2024 Project Los Angeles / Tegridy Code
20
+ #
21
+ # Licensed under the Apache License, Version 2.0 (the "License");
22
+ # you may not use this file except in compliance with the License.
23
+ # You may obtain a copy of the License at
24
+ #
25
+ # http://www.apache.org/licenses/LICENSE-2.0
26
+ #
27
+ # Unless required by applicable law or agreed to in writing, software
28
+ # distributed under the License is distributed on an "AS IS" BASIS,
29
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
30
+ # See the License for the specific language governing permissions and
31
+ # limitations under the License.
32
+ #
33
+ ################################################################################
34
+ ################################################################################
35
+ #
36
+ # Critical dependencies
37
+ #
38
+ # !pip install numpy
39
+ # !pip install scipy
40
+ # !pip install matplotlib
41
+ # !pip install networkx
42
+ # !pip3 install scikit-learn
43
+ #
44
+ ################################################################################
45
+ #
46
+ # Future critical dependencies
47
+ #
48
+ # !pip install umap-learn
49
+ # !pip install alphashape
50
+ #
51
+ ################################################################################
52
+ '''
53
+
54
+ ################################################################################
55
+ # Modules imports
56
+ ################################################################################
57
+
58
+ import os
59
+ from collections import Counter
60
+ from itertools import groupby
61
+
62
+ import numpy as np
63
+
64
+ import networkx as nx
65
+
66
+ from sklearn.manifold import TSNE
67
+ from sklearn import metrics
68
+ from sklearn.preprocessing import MinMaxScaler
69
+ from sklearn.decomposition import PCA
70
+
71
+ from scipy.ndimage import zoom
72
+ from scipy.spatial import distance_matrix
73
+ from scipy.sparse.csgraph import minimum_spanning_tree
74
+ from scipy.stats import zscore
75
+
76
+ import matplotlib.pyplot as plt
77
+ from PIL import Image
78
+
79
+ ################################################################################
80
+ # Constants
81
+ ################################################################################
82
+
83
+ ALL_CHORDS_FILTERED = [[0], [0, 3], [0, 3, 5], [0, 3, 5, 8], [0, 3, 5, 9], [0, 3, 5, 10], [0, 3, 7],
84
+ [0, 3, 7, 10], [0, 3, 8], [0, 3, 9], [0, 3, 10], [0, 4], [0, 4, 6],
85
+ [0, 4, 6, 9], [0, 4, 6, 10], [0, 4, 7], [0, 4, 7, 10], [0, 4, 8], [0, 4, 9],
86
+ [0, 4, 10], [0, 5], [0, 5, 8], [0, 5, 9], [0, 5, 10], [0, 6], [0, 6, 9],
87
+ [0, 6, 10], [0, 7], [0, 7, 10], [0, 8], [0, 9], [0, 10], [1], [1, 4],
88
+ [1, 4, 6], [1, 4, 6, 9], [1, 4, 6, 10], [1, 4, 6, 11], [1, 4, 7],
89
+ [1, 4, 7, 10], [1, 4, 7, 11], [1, 4, 8], [1, 4, 8, 11], [1, 4, 9], [1, 4, 10],
90
+ [1, 4, 11], [1, 5], [1, 5, 8], [1, 5, 8, 11], [1, 5, 9], [1, 5, 10],
91
+ [1, 5, 11], [1, 6], [1, 6, 9], [1, 6, 10], [1, 6, 11], [1, 7], [1, 7, 10],
92
+ [1, 7, 11], [1, 8], [1, 8, 11], [1, 9], [1, 10], [1, 11], [2], [2, 5],
93
+ [2, 5, 8], [2, 5, 8, 11], [2, 5, 9], [2, 5, 10], [2, 5, 11], [2, 6], [2, 6, 9],
94
+ [2, 6, 10], [2, 6, 11], [2, 7], [2, 7, 10], [2, 7, 11], [2, 8], [2, 8, 11],
95
+ [2, 9], [2, 10], [2, 11], [3], [3, 5], [3, 5, 8], [3, 5, 8, 11], [3, 5, 9],
96
+ [3, 5, 10], [3, 5, 11], [3, 7], [3, 7, 10], [3, 7, 11], [3, 8], [3, 8, 11],
97
+ [3, 9], [3, 10], [3, 11], [4], [4, 6], [4, 6, 9], [4, 6, 10], [4, 6, 11],
98
+ [4, 7], [4, 7, 10], [4, 7, 11], [4, 8], [4, 8, 11], [4, 9], [4, 10], [4, 11],
99
+ [5], [5, 8], [5, 8, 11], [5, 9], [5, 10], [5, 11], [6], [6, 9], [6, 10],
100
+ [6, 11], [7], [7, 10], [7, 11], [8], [8, 11], [9], [10], [11]]
101
+
102
+ ################################################################################
103
+
104
+ CHORDS_TYPES = ['WHITE', 'BLACK', 'UNKNOWN', 'MIXED WHITE', 'MIXED BLACK', 'MIXED GRAY']
105
+
106
+ ################################################################################
107
+
108
+ WHITE_NOTES = [0, 2, 4, 5, 7, 9, 11]
109
+
110
+ ################################################################################
111
+
112
+ BLACK_NOTES = [1, 3, 6, 8, 10]
113
+
114
+ ################################################################################
115
+ # Helper functions
116
+ ################################################################################
117
+
118
+ def tones_chord_type(tones_chord,
119
+ return_chord_type_index=True,
120
+ ):
121
+
122
+ """
123
+ Returns tones chord type
124
+ """
125
+
126
+ WN = WHITE_NOTES
127
+ BN = BLACK_NOTES
128
+ MX = WHITE_NOTES + BLACK_NOTES
129
+
130
+
131
+ CHORDS = ALL_CHORDS_FILTERED
132
+
133
+ tones_chord = sorted(tones_chord)
134
+
135
+ ctype = 'UNKNOWN'
136
+
137
+ if tones_chord in CHORDS:
138
+
139
+ if sorted(set(tones_chord) & set(WN)) == tones_chord:
140
+ ctype = 'WHITE'
141
+
142
+ elif sorted(set(tones_chord) & set(BN)) == tones_chord:
143
+ ctype = 'BLACK'
144
+
145
+ if len(tones_chord) > 1 and sorted(set(tones_chord) & set(MX)) == tones_chord:
146
+
147
+ if len(sorted(set(tones_chord) & set(WN))) == len(sorted(set(tones_chord) & set(BN))):
148
+ ctype = 'MIXED GRAY'
149
+
150
+ elif len(sorted(set(tones_chord) & set(WN))) > len(sorted(set(tones_chord) & set(BN))):
151
+ ctype = 'MIXED WHITE'
152
+
153
+ elif len(sorted(set(tones_chord) & set(WN))) < len(sorted(set(tones_chord) & set(BN))):
154
+ ctype = 'MIXED BLACK'
155
+
156
+ if return_chord_type_index:
157
+ return CHORDS_TYPES.index(ctype)
158
+
159
+ else:
160
+ return ctype
161
+
162
+ ###################################################################################
163
+
164
+ def tone_type(tone,
165
+ return_tone_type_index=True
166
+ ):
167
+
168
+ """
169
+ Returns tone type
170
+ """
171
+
172
+ tone = tone % 12
173
+
174
+ if tone in BLACK_NOTES:
175
+ if return_tone_type_index:
176
+ return CHORDS_TYPES.index('BLACK')
177
+ else:
178
+ return "BLACK"
179
+
180
+ else:
181
+ if return_tone_type_index:
182
+ return CHORDS_TYPES.index('WHITE')
183
+ else:
184
+ return "WHITE"
185
+
186
+ ###################################################################################
187
+
188
+ def find_closest_points(points, return_points=True):
189
+
190
+ """
191
+ Find closest 2D points
192
+ """
193
+
194
+ coords = np.array(points)
195
+
196
+ num_points = coords.shape[0]
197
+ closest_matches = np.zeros(num_points, dtype=int)
198
+ distances = np.zeros((num_points, num_points))
199
+
200
+ for i in range(num_points):
201
+ for j in range(num_points):
202
+ if i != j:
203
+ distances[i, j] = np.linalg.norm(coords[i] - coords[j])
204
+ else:
205
+ distances[i, j] = np.inf
206
+
207
+ closest_matches = np.argmin(distances, axis=1)
208
+
209
+ if return_points:
210
+ points_matches = coords[closest_matches].tolist()
211
+ return points_matches
212
+
213
+ else:
214
+ return closest_matches.tolist()
215
+
216
+ ################################################################################
217
+
218
+ def reduce_dimensionality_tsne(list_of_values,
219
+ n_comp=2,
220
+ n_iter=5000,
221
+ verbose=True
222
+ ):
223
+
224
+ """
225
+ Reduces the dimensionality of the values using t-SNE.
226
+ """
227
+
228
+ vals = np.array(list_of_values)
229
+
230
+ tsne = TSNE(n_components=n_comp,
231
+ n_iter=n_iter,
232
+ verbose=verbose)
233
+
234
+ reduced_vals = tsne.fit_transform(vals)
235
+
236
+ return reduced_vals.tolist()
237
+
238
+ ################################################################################
239
+
240
+ def compute_mst_edges(similarity_scores_list):
241
+
242
+ """
243
+ Computes the Minimum Spanning Tree (MST) edges based on the similarity scores.
244
+ """
245
+
246
+ num_tokens = len(similarity_scores_list[0])
247
+
248
+ graph = nx.Graph()
249
+
250
+ for i in range(num_tokens):
251
+ for j in range(i + 1, num_tokens):
252
+ weight = 1 - similarity_scores_list[i][j]
253
+ graph.add_edge(i, j, weight=weight)
254
+
255
+ mst = nx.minimum_spanning_tree(graph)
256
+
257
+ mst_edges = list(mst.edges(data=False))
258
+
259
+ return mst_edges
260
+
261
+ ################################################################################
262
+
263
+ def square_binary_matrix(binary_matrix,
264
+ matrix_size=128,
265
+ interpolation_order=5,
266
+ return_square_matrix_points=False
267
+ ):
268
+
269
+ """
270
+ Reduces an arbitrary binary matrix to a square binary matrix
271
+ """
272
+
273
+ zoom_factors = (matrix_size / len(binary_matrix), 1)
274
+
275
+ resized_matrix = zoom(binary_matrix, zoom_factors, order=interpolation_order)
276
+
277
+ resized_matrix = (resized_matrix > 0.5).astype(int)
278
+
279
+ final_matrix = np.zeros((matrix_size, matrix_size), dtype=int)
280
+ final_matrix[:, :resized_matrix.shape[1]] = resized_matrix
281
+
282
+ points = np.column_stack(np.where(final_matrix == 1)).tolist()
283
+
284
+ if return_square_matrix_points:
285
+ return points
286
+
287
+ else:
288
+ return resized_matrix
289
+
290
+ ################################################################################
291
+
292
+ def square_matrix_points_colors(square_matrix_points):
293
+
294
+ """
295
+ Returns colors for square matrix points
296
+ """
297
+
298
+ cmap = generate_colors(12)
299
+
300
+ chords = []
301
+ chords_dict = set()
302
+ counts = []
303
+
304
+ for k, v in groupby(square_matrix_points, key=lambda x: x[0]):
305
+ pgroup = [vv[1] for vv in v]
306
+ chord = sorted(set(pgroup))
307
+ tchord = sorted(set([p % 12 for p in chord]))
308
+ chords_dict.add(tuple(tchord))
309
+ chords.append(tuple(tchord))
310
+ counts.append(len(pgroup))
311
+
312
+ chords_dict = sorted(chords_dict)
313
+
314
+ colors = []
315
+
316
+ for i, c in enumerate(chords):
317
+ colors.extend([cmap[round(sum(c) / len(c))]] * counts[i])
318
+
319
+ return colors
320
+
321
+ ################################################################################
322
+
323
+ def hsv_to_rgb(h, s, v):
324
+
325
+ if s == 0.0:
326
+ return v, v, v
327
+
328
+ i = int(h*6.0)
329
+ f = (h*6.0) - i
330
+ p = v*(1.0 - s)
331
+ q = v*(1.0 - s*f)
332
+ t = v*(1.0 - s*(1.0-f))
333
+ i = i%6
334
+
335
+ return [(v, t, p), (q, v, p), (p, v, t), (p, q, v), (t, p, v), (v, p, q)][i]
336
+
337
+ ################################################################################
338
+
339
+ def generate_colors(n):
340
+ return [hsv_to_rgb(i/n, 1, 1) for i in range(n)]
341
+
342
+ ################################################################################
343
+
344
+ def add_arrays(a, b):
345
+ return [sum(pair) for pair in zip(a, b)]
346
+
347
+ ################################################################################
348
+
349
+ def calculate_similarities(lists_of_values, metric='cosine'):
350
+ return metrics.pairwise_distances(lists_of_values, metric=metric).tolist()
351
+
352
+ ################################################################################
353
+
354
+ def get_tokens_embeddings(x_transformer_model):
355
+ return x_transformer_model.net.token_emb.emb.weight.detach().cpu().tolist()
356
+
357
+ ################################################################################
358
+
359
+ def minkowski_distance_matrix(X, p=3):
360
+
361
+ X = np.array(X)
362
+
363
+ n = X.shape[0]
364
+ dist_matrix = np.zeros((n, n))
365
+
366
+ for i in range(n):
367
+ for j in range(n):
368
+ dist_matrix[i, j] = np.sum(np.abs(X[i] - X[j])**p)**(1/p)
369
+
370
+ return dist_matrix.tolist()
371
+
372
+ ################################################################################
373
+
374
+ def robust_normalize(values):
375
+
376
+ values = np.array(values)
377
+ q1 = np.percentile(values, 25)
378
+ q3 = np.percentile(values, 75)
379
+ iqr = q3 - q1
380
+
381
+ filtered_values = values[(values >= q1 - 1.5 * iqr) & (values <= q3 + 1.5 * iqr)]
382
+
383
+ min_val = np.min(filtered_values)
384
+ max_val = np.max(filtered_values)
385
+ normalized_values = (values - min_val) / (max_val - min_val)
386
+
387
+ normalized_values = np.clip(normalized_values, 0, 1)
388
+
389
+ return normalized_values.tolist()
390
+
391
+ ################################################################################
392
+
393
+ def min_max_normalize(values):
394
+
395
+ scaler = MinMaxScaler()
396
+
397
+ return scaler.fit_transform(values).tolist()
398
+
399
+ ################################################################################
400
+
401
+ def remove_points_outliers(points, z_score_threshold=3):
402
+
403
+ points = np.array(points)
404
+
405
+ z_scores = np.abs(zscore(points, axis=0))
406
+
407
+ return points[(z_scores < z_score_threshold).all(axis=1)].tolist()
408
+
409
+ ################################################################################
410
+
411
+ def generate_labels(lists_of_values,
412
+ return_indices_labels=False
413
+ ):
414
+
415
+ ordered_indices = list(range(len(lists_of_values)))
416
+ ordered_indices_labels = [str(i) for i in ordered_indices]
417
+ ordered_values_labels = [str(lists_of_values[i]) for i in ordered_indices]
418
+
419
+ if return_indices_labels:
420
+ return ordered_indices_labels
421
+
422
+ else:
423
+ return ordered_values_labels
424
+
425
+ ################################################################################
426
+
427
+ def reduce_dimensionality_pca(list_of_values, n_components=2):
428
+
429
+ """
430
+ Reduces the dimensionality of the values using PCA.
431
+ """
432
+
433
+ pca = PCA(n_components=n_components)
434
+ pca_data = pca.fit_transform(list_of_values)
435
+
436
+ return pca_data.tolist()
437
+
438
+ def reduce_dimensionality_simple(list_of_values,
439
+ return_means=True,
440
+ return_std_devs=True,
441
+ return_medians=False,
442
+ return_vars=False
443
+ ):
444
+
445
+ '''
446
+ Reduces dimensionality of the values in a simple way
447
+ '''
448
+
449
+ array = np.array(list_of_values)
450
+ results = []
451
+
452
+ if return_means:
453
+ means = np.mean(array, axis=1)
454
+ results.append(means)
455
+
456
+ if return_std_devs:
457
+ std_devs = np.std(array, axis=1)
458
+ results.append(std_devs)
459
+
460
+ if return_medians:
461
+ medians = np.median(array, axis=1)
462
+ results.append(medians)
463
+
464
+ if return_vars:
465
+ vars = np.var(array, axis=1)
466
+ results.append(vars)
467
+
468
+ merged_results = np.column_stack(results)
469
+
470
+ return merged_results.tolist()
471
+
472
+ ################################################################################
473
+
474
+ def reduce_dimensionality_2d_distance(list_of_values, p=5):
475
+
476
+ '''
477
+ Reduces the dimensionality of the values using 2d distance
478
+ '''
479
+
480
+ values = np.array(list_of_values)
481
+
482
+ dist_matrix = distance_matrix(values, values, p=p)
483
+
484
+ mst = minimum_spanning_tree(dist_matrix).toarray()
485
+
486
+ points = []
487
+
488
+ for i in range(len(values)):
489
+ for j in range(len(values)):
490
+ if mst[i, j] > 0:
491
+ points.append([i, j])
492
+
493
+ return points
494
+
495
+ ################################################################################
496
+
497
+ def normalize_to_range(values, n):
498
+
499
+ min_val = min(values)
500
+ max_val = max(values)
501
+
502
+ range_val = max_val - min_val
503
+
504
+ normalized_values = [((value - min_val) / range_val * 2 * n) - n for value in values]
505
+
506
+ return normalized_values
507
+
508
+ ################################################################################
509
+
510
+ def reduce_dimensionality_simple_pca(list_of_values, n_components=2):
511
+
512
+ '''
513
+ Reduces the dimensionality of the values using simple PCA
514
+ '''
515
+
516
+ reduced_values = []
517
+
518
+ for l in list_of_values:
519
+
520
+ norm_values = [round(v * len(l)) for v in normalize_to_range(l, (n_components+1) // 2)]
521
+
522
+ pca_values = Counter(norm_values).most_common()
523
+ pca_values = [vv[0] / len(l) for vv in pca_values]
524
+ pca_values = pca_values[:n_components]
525
+ pca_values = pca_values + [0] * (n_components - len(pca_values))
526
+
527
+ reduced_values.append(pca_values)
528
+
529
+ return reduced_values
530
+
531
+ ################################################################################
532
+
533
+ def filter_and_replace_values(list_of_values,
534
+ threshold,
535
+ replace_value,
536
+ replace_above_threshold=False
537
+ ):
538
+
539
+ array = np.array(list_of_values)
540
+
541
+ modified_array = np.copy(array)
542
+
543
+ if replace_above_threshold:
544
+ modified_array[modified_array > threshold] = replace_value
545
+
546
+ else:
547
+ modified_array[modified_array < threshold] = replace_value
548
+
549
+ return modified_array.tolist()
550
+
551
+ ################################################################################
552
+
553
+ def find_shortest_constellation_path(points,
554
+ start_point_idx,
555
+ end_point_idx,
556
+ p=5,
557
+ return_path_length=False,
558
+ return_path_points=False,
559
+ ):
560
+
561
+ """
562
+ Finds the shortest path between two points of the points constellation
563
+ """
564
+
565
+ points = np.array(points)
566
+
567
+ dist_matrix = distance_matrix(points, points, p=p)
568
+
569
+ mst = minimum_spanning_tree(dist_matrix).toarray()
570
+
571
+ G = nx.Graph()
572
+
573
+ for i in range(len(points)):
574
+ for j in range(len(points)):
575
+ if mst[i, j] > 0:
576
+ G.add_edge(i, j, weight=mst[i, j])
577
+
578
+ path = nx.shortest_path(G,
579
+ source=start_point_idx,
580
+ target=end_point_idx,
581
+ weight='weight'
582
+ )
583
+
584
+ path_length = nx.shortest_path_length(G,
585
+ source=start_point_idx,
586
+ target=end_point_idx,
587
+ weight='weight')
588
+
589
+ path_points = points[np.array(path)].tolist()
590
+
591
+
592
+ if return_path_points:
593
+ return path_points
594
+
595
+ if return_path_length:
596
+ return path_length
597
+
598
+ return path
599
+
600
+ ################################################################################
601
+ # Core functions
602
+ ################################################################################
603
+
604
+ def plot_ms_SONG(ms_song,
605
+ preview_length_in_notes=0,
606
+ block_lines_times_list = None,
607
+ plot_title='ms Song',
608
+ max_num_colors=129,
609
+ drums_color_num=128,
610
+ plot_size=(11,4),
611
+ note_height = 0.75,
612
+ show_grid_lines=False,
613
+ return_plt = False,
614
+ timings_multiplier=1,
615
+ save_plt='',
616
+ save_only_plt_image=True,
617
+ save_transparent=False
618
+ ):
619
+
620
+ '''ms SONG plot'''
621
+
622
+ notes = [s for s in ms_song if s[0] == 'note']
623
+
624
+ if (len(max(notes, key=len)) != 7) and (len(min(notes, key=len)) != 7):
625
+ print('The song notes do not have patches information')
626
+ print('Please add patches to the notes in the song')
627
+
628
+ else:
629
+
630
+ start_times = [(s[1] * timings_multiplier) / 1000 for s in notes]
631
+ durations = [(s[2] * timings_multiplier) / 1000 for s in notes]
632
+ pitches = [s[4] for s in notes]
633
+ patches = [s[6] for s in notes]
634
+
635
+ colors = generate_colors(max_num_colors)
636
+ colors[drums_color_num] = (1, 1, 1)
637
+
638
+ pbl = (notes[preview_length_in_notes][1] * timings_multiplier) / 1000
639
+
640
+ fig, ax = plt.subplots(figsize=plot_size)
641
+
642
+ for start, duration, pitch, patch in zip(start_times, durations, pitches, patches):
643
+ rect = plt.Rectangle((start, pitch), duration, note_height, facecolor=colors[patch])
644
+ ax.add_patch(rect)
645
+
646
+ ax.set_xlim([min(start_times), max(add_arrays(start_times, durations))])
647
+ ax.set_ylim([min(pitches)-1, max(pitches)+1])
648
+
649
+ ax.set_facecolor('black')
650
+ fig.patch.set_facecolor('white')
651
+
652
+ if preview_length_in_notes > 0:
653
+ ax.axvline(x=pbl, c='white')
654
+
655
+ if block_lines_times_list:
656
+ for bl in block_lines_times_list:
657
+ ax.axvline(x=bl, c='white')
658
+
659
+ if show_grid_lines:
660
+ ax.grid(color='white')
661
+
662
+ plt.xlabel('Time (s)', c='black')
663
+ plt.ylabel('MIDI Pitch', c='black')
664
+
665
+ plt.title(plot_title)
666
+
667
+ if save_plt != '':
668
+ if save_only_plt_image:
669
+ plt.axis('off')
670
+ plt.title('')
671
+ plt.savefig(save_plt,
672
+ transparent=save_transparent,
673
+ bbox_inches='tight',
674
+ pad_inches=0,
675
+ facecolor='black'
676
+ )
677
+ plt.close()
678
+
679
+ else:
680
+ plt.savefig(save_plt)
681
+ plt.close()
682
+
683
+ if return_plt:
684
+ return fig
685
+
686
+ plt.show()
687
+ plt.close()
688
+
689
+ ################################################################################
690
+
691
+ def plot_square_matrix_points(list_of_points,
692
+ list_of_points_colors,
693
+ plot_size=(7, 7),
694
+ point_size = 10,
695
+ show_grid_lines=False,
696
+ plot_title = 'Square Matrix Points Plot',
697
+ return_plt=False,
698
+ save_plt='',
699
+ save_only_plt_image=True,
700
+ save_transparent=False
701
+ ):
702
+
703
+ '''Square matrix points plot'''
704
+
705
+ fig, ax = plt.subplots(figsize=plot_size)
706
+
707
+ ax.set_facecolor('black')
708
+
709
+ if show_grid_lines:
710
+ ax.grid(color='white')
711
+
712
+ plt.xlabel('Time Step', c='black')
713
+ plt.ylabel('MIDI Pitch', c='black')
714
+
715
+ plt.title(plot_title)
716
+
717
+ plt.scatter([p[0] for p in list_of_points],
718
+ [p[1] for p in list_of_points],
719
+ c=list_of_points_colors,
720
+ s=point_size
721
+ )
722
+
723
+ if save_plt != '':
724
+ if save_only_plt_image:
725
+ plt.axis('off')
726
+ plt.title('')
727
+ plt.savefig(save_plt,
728
+ transparent=save_transparent,
729
+ bbox_inches='tight',
730
+ pad_inches=0,
731
+ facecolor='black'
732
+ )
733
+ plt.close()
734
+
735
+ else:
736
+ plt.savefig(save_plt)
737
+ plt.close()
738
+
739
+ if return_plt:
740
+ return fig
741
+
742
+ plt.show()
743
+ plt.close()
744
+
745
+ ################################################################################
746
+
747
+ def plot_cosine_similarities(lists_of_values,
748
+ plot_size=(7, 7),
749
+ save_plot=''
750
+ ):
751
+
752
+ """
753
+ Cosine similarities plot
754
+ """
755
+
756
+ cos_sim = metrics.pairwise_distances(lists_of_values, metric='cosine')
757
+
758
+ plt.figure(figsize=plot_size)
759
+
760
+ plt.imshow(cos_sim, cmap="inferno", interpolation="nearest")
761
+
762
+ im_ratio = cos_sim.shape[0] / cos_sim.shape[1]
763
+
764
+ plt.colorbar(fraction=0.046 * im_ratio, pad=0.04)
765
+
766
+ plt.xlabel("Index")
767
+ plt.ylabel("Index")
768
+
769
+ plt.tight_layout()
770
+
771
+ if save_plot != '':
772
+ plt.savefig(save_plot, bbox_inches="tight")
773
+ plt.close()
774
+
775
+ plt.show()
776
+ plt.close()
777
+
778
+ ################################################################################
779
+
780
+ def plot_points_with_mst_lines(points,
781
+ points_labels,
782
+ points_mst_edges,
783
+ plot_size=(20, 20),
784
+ labels_size=24,
785
+ save_plot=''
786
+ ):
787
+
788
+ """
789
+ Plots 2D points with labels and MST lines.
790
+ """
791
+
792
+ plt.figure(figsize=plot_size)
793
+
794
+ for i, label in enumerate(points_labels):
795
+ plt.scatter(points[i][0], points[i][1])
796
+ plt.annotate(label, (points[i][0], points[i][1]), fontsize=labels_size)
797
+
798
+ for edge in points_mst_edges:
799
+ i, j = edge
800
+ plt.plot([points[i][0], points[j][0]], [points[i][1], points[j][1]], 'k-', alpha=0.5)
801
+
802
+ plt.title('Points Map with MST Lines', fontsize=labels_size)
803
+ plt.xlabel('X-axis', fontsize=labels_size)
804
+ plt.ylabel('Y-axis', fontsize=labels_size)
805
+
806
+ if save_plot != '':
807
+ plt.savefig(save_plot, bbox_inches="tight")
808
+ plt.close()
809
+
810
+ plt.show()
811
+
812
+ plt.close()
813
+
814
+ ################################################################################
815
+
816
+ def plot_points_constellation(points,
817
+ points_labels,
818
+ p=5,
819
+ plot_size=(15, 15),
820
+ labels_size=12,
821
+ show_grid=False,
822
+ save_plot=''
823
+ ):
824
+
825
+ """
826
+ Plots 2D points constellation
827
+ """
828
+
829
+ points = np.array(points)
830
+
831
+ dist_matrix = distance_matrix(points, points, p=p)
832
+
833
+ mst = minimum_spanning_tree(dist_matrix).toarray()
834
+
835
+ plt.figure(figsize=plot_size)
836
+
837
+ plt.scatter(points[:, 0], points[:, 1], color='blue')
838
+
839
+ for i, label in enumerate(points_labels):
840
+ plt.annotate(label, (points[i, 0], points[i, 1]),
841
+ textcoords="offset points",
842
+ xytext=(0, 10),
843
+ ha='center',
844
+ fontsize=labels_size
845
+ )
846
+
847
+ for i in range(len(points)):
848
+ for j in range(len(points)):
849
+ if mst[i, j] > 0:
850
+ plt.plot([points[i, 0], points[j, 0]], [points[i, 1], points[j, 1]], 'k--')
851
+
852
+ plt.xlabel('X-axis', fontsize=labels_size)
853
+ plt.ylabel('Y-axis', fontsize=labels_size)
854
+ plt.title('2D Coordinates with Minimum Spanning Tree', fontsize=labels_size)
855
+
856
+ plt.grid(show_grid)
857
+
858
+ if save_plot != '':
859
+ plt.savefig(save_plot, bbox_inches="tight")
860
+ plt.close()
861
+
862
+ plt.show()
863
+
864
+ plt.close()
865
+
866
+ ################################################################################
867
+
868
+ def binary_matrix_to_images(matrix,
869
+ step,
870
+ overlap,
871
+ output_folder='./Dataset/',
872
+ output_img_prefix='image',
873
+ output_img_ext='.png',
874
+ save_to_array=False,
875
+ verbose=True
876
+ ):
877
+
878
+ if not save_to_array:
879
+
880
+ if verbose:
881
+ print('=' * 70)
882
+ print('Checking output folder dir...')
883
+
884
+ os.makedirs(os.path.dirname(output_folder), exist_ok=True)
885
+
886
+ if verbose:
887
+ print('Done!')
888
+
889
+ if verbose:
890
+ print('=' * 70)
891
+ print('Writing images...')
892
+
893
+ matrix = np.array(matrix, dtype=np.uint8)
894
+
895
+ image_array = []
896
+
897
+ for i in range(0, max(1, matrix.shape[0]), overlap):
898
+
899
+ submatrix = matrix[i:i+step, :]
900
+
901
+ if submatrix.shape[0] < 128:
902
+ zeros_array = np.zeros((128-submatrix.shape[0], 128))
903
+ submatrix = np.vstack((submatrix, zeros_array))
904
+
905
+ img = Image.fromarray(submatrix * 255).convert('1')
906
+
907
+ if save_to_array:
908
+ image_array.append(np.array(img))
909
+
910
+ else:
911
+ img.save(output_folder + output_img_prefix + '_' + str(matrix.shape[1]) + '_' + str(i).zfill(7) + output_img_ext)
912
+
913
+ if verbose:
914
+ print('Done!')
915
+ print('=' * 70)
916
+ print('Saved', (matrix.shape[0] // min(step, overlap))+1, 'images!')
917
+ print('=' * 70)
918
+
919
+ if save_to_array:
920
+ return np.array(image_array).tolist()
921
+
922
+ ################################################################################
923
+
924
+ def images_to_binary_matrix(list_of_images):
925
+
926
+ image_array = np.array(list_of_images)
927
+
928
+ original_matrix = []
929
+
930
+ for img in image_array:
931
+
932
+ submatrix = np.array(img)
933
+ original_matrix.extend(submatrix.tolist())
934
+
935
+ return original_matrix
936
+
937
+ ################################################################################
938
+
939
+ def square_image_matrix(image_matrix,
940
+ matrix_size=128,
941
+ num_pca_components=5,
942
+ filter_out_zero_rows=False,
943
+ return_square_matrix_points=False
944
+ ):
945
+
946
+ """
947
+ Reduces an arbitrary image matrix to a square image matrix
948
+ """
949
+
950
+ matrix = np.array(image_matrix)
951
+
952
+ if filter_out_zero_rows:
953
+ matrix = matrix[~np.all(matrix == 0, axis=1)]
954
+
955
+ target_rows = matrix_size
956
+
957
+ rows_per_group = matrix.shape[0] // target_rows
958
+
959
+ compressed_matrix = np.zeros((target_rows, matrix.shape[1]), dtype=np.int32)
960
+
961
+ for i in range(target_rows):
962
+ start_row = i * rows_per_group
963
+ end_row = (i + 1) * rows_per_group
964
+ group = matrix[start_row:end_row, :]
965
+
966
+ pca = PCA(n_components=num_pca_components)
967
+ pca.fit(group)
968
+
969
+ principal_component = np.mean(pca.components_, axis=0)
970
+ contributions = np.dot(group, principal_component)
971
+ selected_row_index = np.argmax(contributions)
972
+
973
+ compressed_matrix[i, :] = group[selected_row_index, :]
974
+
975
+ if return_square_matrix_points:
976
+ filtered_matrix = compressed_matrix[~np.all(compressed_matrix == 0, axis=1)]
977
+
978
+ row_indexes, col_indexes = np.where(filtered_matrix != 0)
979
+ points = np.column_stack((row_indexes, filtered_matrix[row_indexes, col_indexes])).tolist()
980
+
981
+ return points
982
+
983
+ else:
984
+ return compressed_matrix.tolist()
985
+
986
+ ################################################################################
987
+
988
+ def image_matrix_to_images(image_matrix,
989
+ step,
990
+ overlap,
991
+ num_img_channels=3,
992
+ output_folder='./Dataset/',
993
+ output_img_prefix='image',
994
+ output_img_ext='.png',
995
+ save_to_array=False,
996
+ verbose=True
997
+ ):
998
+
999
+ if num_img_channels > 1:
1000
+ n_mat_channels = 3
1001
+
1002
+ else:
1003
+ n_mat_channels = 1
1004
+
1005
+ if not save_to_array:
1006
+
1007
+ if verbose:
1008
+ print('=' * 70)
1009
+ print('Checking output folder dir...')
1010
+
1011
+ os.makedirs(os.path.dirname(output_folder), exist_ok=True)
1012
+
1013
+ if verbose:
1014
+ print('Done!')
1015
+
1016
+ if verbose:
1017
+ print('=' * 70)
1018
+ print('Writing images...')
1019
+
1020
+ matrix = np.array(image_matrix)
1021
+
1022
+ image_array = []
1023
+
1024
+ for i in range(0, max(1, matrix.shape[0]), overlap):
1025
+
1026
+ submatrix = matrix[i:i+step, :]
1027
+
1028
+ if submatrix.shape[0] < 128:
1029
+ zeros_array = np.zeros((128-submatrix.shape[0], 128))
1030
+ submatrix = np.vstack((submatrix, zeros_array))
1031
+
1032
+ if n_mat_channels == 3:
1033
+
1034
+ r = (submatrix // (256*256)) % 256
1035
+ g = (submatrix // 256) % 256
1036
+ b = submatrix % 256
1037
+
1038
+ rgb_image = np.stack((r, g, b), axis=-1).astype(np.uint8)
1039
+ img = Image.fromarray(rgb_image, 'RGB')
1040
+
1041
+ else:
1042
+ grayscale_image = submatrix.astype(np.uint8)
1043
+ img = Image.fromarray(grayscale_image, 'L')
1044
+
1045
+ if save_to_array:
1046
+ image_array.append(np.array(img))
1047
+
1048
+ else:
1049
+ img.save(output_folder + output_img_prefix + '_' + str(matrix.shape[1]) + '_' + str(i).zfill(7) + output_img_ext)
1050
+
1051
+ if verbose:
1052
+ print('Done!')
1053
+ print('=' * 70)
1054
+ print('Saved', (matrix.shape[0] // min(step, overlap))+1, 'images!')
1055
+ print('=' * 70)
1056
+
1057
+ if save_to_array:
1058
+ return np.array(image_array).tolist()
1059
+
1060
+ ################################################################################
1061
+
1062
+ def images_to_image_matrix(list_of_images,
1063
+ num_img_channels=3
1064
+ ):
1065
+
1066
+ if num_img_channels > 1:
1067
+ n_mat_channels = 3
1068
+
1069
+ else:
1070
+ n_mat_channels = 1
1071
+
1072
+ image_array = np.array(list_of_images)
1073
+
1074
+ original_matrix = []
1075
+
1076
+ for img in image_array:
1077
+
1078
+ if num_img_channels == 3:
1079
+
1080
+ rgb_array = np.array(img)
1081
+
1082
+ matrix = (rgb_array[..., 0].astype(np.int64) * 256*256 +
1083
+ rgb_array[..., 1].astype(np.int64) * 256 +
1084
+ rgb_array[..., 2].astype(np.int64))
1085
+
1086
+ else:
1087
+ matrix = np.array(img)
1088
+
1089
+ original_matrix.extend(matrix)
1090
+
1091
+ return original_matrix
1092
+
1093
+ ################################################################################
1094
+ # [WIP] Future dev functions
1095
+ ################################################################################
1096
+
1097
+ '''
1098
+ import umap
1099
+
1100
+ def reduce_dimensionality_umap(list_of_values,
1101
+ n_comp=2,
1102
+ n_neighbors=15,
1103
+ ):
1104
+
1105
+ """
1106
+ Reduces the dimensionality of the values using UMAP.
1107
+ """
1108
+
1109
+ vals = np.array(list_of_values)
1110
+
1111
+ umap_reducer = umap.UMAP(n_components=n_comp,
1112
+ n_neighbors=n_neighbors,
1113
+ n_epochs=5000,
1114
+ verbose=True
1115
+ )
1116
+
1117
+ reduced_vals = umap_reducer.fit_transform(vals)
1118
+
1119
+ return reduced_vals.tolist()
1120
+ '''
1121
+
1122
+ ################################################################################
1123
+
1124
+ '''
1125
+ import alphashape
1126
+ from shapely.geometry import Point
1127
+ from matplotlib.tri import Triangulation, LinearTriInterpolator
1128
+ from scipy.stats import zscore
1129
+
1130
+ #===============================================================================
1131
+
1132
+ coordinates = points
1133
+
1134
+ dist_matrix = minkowski_distance_matrix(coordinates, p=3) # You can change the value of p as needed
1135
+
1136
+ # Centering matrix
1137
+ n = dist_matrix.shape[0]
1138
+ H = np.eye(n) - np.ones((n, n)) / n
1139
+
1140
+ # Apply double centering
1141
+ B = -0.5 * H @ dist_matrix**2 @ H
1142
+
1143
+ # Eigen decomposition
1144
+ eigvals, eigvecs = np.linalg.eigh(B)
1145
+
1146
+ # Sort eigenvalues and eigenvectors
1147
+ idx = np.argsort(eigvals)[::-1]
1148
+ eigvals = eigvals[idx]
1149
+ eigvecs = eigvecs[:, idx]
1150
+
1151
+ # Select the top 2 eigenvectors
1152
+ X_transformed = eigvecs[:, :2] * np.sqrt(eigvals[:2])
1153
+
1154
+ #===============================================================================
1155
+
1156
+ src_points = X_transformed
1157
+ src_values = np.array([[p[1]] for p in points]) #np.random.rand(X_transformed.shape[0])
1158
+
1159
+ #===============================================================================
1160
+
1161
+ # Normalize the points to the range [0, 1]
1162
+ scaler = MinMaxScaler()
1163
+ points_normalized = scaler.fit_transform(src_points)
1164
+
1165
+ values_normalized = custom_normalize(src_values)
1166
+
1167
+ # Remove outliers based on z-score
1168
+ z_scores = np.abs(zscore(points_normalized, axis=0))
1169
+ filtered_points = points_normalized[(z_scores < 3).all(axis=1)]
1170
+ filtered_values = values_normalized[(z_scores < 3).all(axis=1)]
1171
+
1172
+ # Compute the concave hull (alpha shape)
1173
+ alpha = 8 # Adjust alpha as needed
1174
+ hull = alphashape.alphashape(filtered_points, alpha)
1175
+
1176
+ # Create a triangulation
1177
+ tri = Triangulation(filtered_points[:, 0], filtered_points[:, 1])
1178
+
1179
+ # Interpolate the values on the triangulation
1180
+ interpolator = LinearTriInterpolator(tri, filtered_values[:, 0])
1181
+ xi, yi = np.meshgrid(np.linspace(0, 1, 100), np.linspace(0, 1, 100))
1182
+ zi = interpolator(xi, yi)
1183
+
1184
+ # Mask out points outside the concave hull
1185
+ mask = np.array([hull.contains(Point(x, y)) for x, y in zip(xi.flatten(), yi.flatten())])
1186
+ zi = np.ma.array(zi, mask=~mask.reshape(zi.shape))
1187
+
1188
+ # Plot the filled contour based on the interpolated values
1189
+ plt.contourf(xi, yi, zi, levels=50, cmap='viridis')
1190
+
1191
+ # Plot the original points
1192
+ #plt.scatter(filtered_points[:, 0], filtered_points[:, 1], c=filtered_values, edgecolors='k')
1193
+
1194
+ plt.title('Filled Contour Plot with Original Values')
1195
+ plt.xlabel('X-axis')
1196
+ plt.ylabel('Y-axis')
1197
+ plt.colorbar(label='Value')
1198
+ plt.show()
1199
+ '''
1200
+
1201
+ ################################################################################
1202
+ #
1203
+ # This is the end of TPLOTS Python modules
1204
+ #
1205
+ ################################################################################
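For completeness, binary_matrix_to_images and images_to_binary_matrix defined above act as near-inverses when the windows do not overlap (step == overlap) and the number of time steps is a multiple of step; with the 128/32 settings used in the notebook, the reconstruction instead repeats the overlapping rows. A small round-trip self-check under those non-overlapping assumptions, keeping the windows in memory via save_to_array=True rather than writing PNGs:

```python
import numpy as np
import TPLOTS  # assumes TPLOTS.py from this commit is importable

# Toy binary piano-roll: 256 time steps x 128 pitches (a multiple of the 128-row window)
roll = (np.random.rand(256, 128) > 0.9).astype(int).tolist()

# Non-overlapping 128x128 windows returned as arrays instead of being written to disk
windows = TPLOTS.binary_matrix_to_images(roll, 128, 128, save_to_array=True, verbose=False)

# Stack the windows back into one matrix and compare with the original
restored = TPLOTS.images_to_binary_matrix(windows)
assert np.array_equal(np.array(roll), np.array(restored, dtype=int))
```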
midi_images_solo_piano_dataset_maker.py ADDED
@@ -0,0 +1,330 @@
1
+ # -*- coding: utf-8 -*-
2
+ """MIDI_Images_Solo_Piano_Dataset_Maker.ipynb
3
+
4
+ Automatically generated by Colab.
5
+
6
+ Original file is located at
7
+ https://colab.research.google.com/drive/15E6o3Y1xPific5RtIZ-1CneHQhts3eEr
8
+
9
+ # MIDI Images Solo Piano Dataset Maker (ver. 1.0)
10
+
11
+ ***
12
+
13
+ Powered by tegridy-tools: https://github.com/asigalov61/tegridy-tools
14
+
15
+ ***
16
+
17
+ #### Project Los Angeles
18
+
19
+ #### Tegridy Code 2024
20
+
21
+ ***
22
+
23
+ # (SETUP ENVIRONMENT)
24
+ """
25
+
26
+ # @title Install dependencies
27
+ !git clone --depth 1 https://github.com/asigalov61/tegridy-tools
28
+
29
+ # Commented out IPython magic to ensure Python compatibility.
30
+ #@title Import all needed modules
31
+
32
+ print('=' * 70)
33
+ print('Loading core modules...')
34
+ print('Please wait...')
35
+ print('=' * 70)
36
+
37
+ import os
38
+ import copy
39
+ import math
40
+ import statistics
41
+ import random
42
+ import pickle
43
+ import shutil
44
+ from itertools import groupby
45
+ from collections import Counter
46
+ from sklearn.metrics import pairwise_distances
47
+ from sklearn import metrics
48
+ from joblib import Parallel, delayed, parallel_config
49
+ import numpy as np
50
+ from tqdm import tqdm
51
+ from PIL import Image
52
+ import matplotlib.pyplot as plt
53
+
54
+ print('Done!')
55
+ print('=' * 70)
56
+ print('Creating I/O dirs...')
57
+
58
+ if not os.path.exists('/content/Dataset'):
59
+ os.makedirs('/content/Dataset')
60
+
61
+ print('Done!')
62
+ print('=' * 70)
63
+ print('Loading tegridy-tools modules...')
64
+ print('=' * 70)
65
+
66
+ # %cd /content/tegridy-tools/tegridy-tools
67
+
68
+ import TMIDIX
69
+ import TMELODIES
70
+ import TPLOTS
71
+ import HaystackSearch
72
+
73
+ # %cd /content/
74
+
75
+ print('=' * 70)
76
+ print('Done!')
77
+ print('=' * 70)
78
+
79
+ """# (DOWNLOAD SAMPLE MIDI DATASET)"""
80
+
81
+ # Commented out IPython magic to ensure Python compatibility.
82
+ # @title Download sample MIDI dataset (POP909)
83
+ # %cd /content/Dataset/
84
+ !git clone --depth 1 https://github.com/music-x-lab/POP909-Dataset
85
+ # %cd /content/
86
+
87
+ #@title Save file list
88
+ ###########
89
+
90
+ print('=' * 70)
91
+ print('Loading MIDI files...')
92
+ print('This may take a while, especially on a large dataset...')
93
+
94
+ dataset_addr = '/content/Dataset/'
95
+
96
+ # os.chdir(dataset_addr)
97
+ filez = list()
98
+ for (dirpath, dirnames, filenames) in os.walk(dataset_addr):
99
+ filez += [os.path.join(dirpath, file) for file in filenames if file.endswith('.mid') or file.endswith('.midi') or file.endswith('.kar')]
100
+ print('=' * 70)
101
+
102
+ if filez == []:
103
+ print('Could not find any MIDI files. Please check Dataset dir...')
104
+ print('=' * 70)
105
+
106
+ print('Randomizing file list...')
107
+ random.shuffle(filez)
108
+ print('Done!')
109
+ print('=' * 70)
110
+ print('Total found MIDI files:', len(filez))
111
+ print('=' * 70)
112
+
113
+ TMIDIX.Tegridy_Any_Pickle_File_Writer(filez, 'filez')
114
+
115
+ print('=' * 70)
116
+
117
+ """# (LOAD TMIDIX MIDI PROCESSOR)"""
118
+
119
+ #@title Load TMIDIX MIDI processor
120
+
121
+ print('=' * 70)
122
+ print('TMIDIX MIDI Processor')
123
+ print('=' * 70)
124
+ print('Loading...')
125
+
126
+ ###########
127
+
128
+ def TMIDIX_MIDI_Processor(midi_file):
129
+
130
+ fn = os.path.basename(midi_file)
131
+ fn1 = fn.split('.mid')[0]
132
+
133
+ try:
134
+
135
+ #=======================================================
136
+ # START PROCESSING
137
+
138
+ raw_score = TMIDIX.midi2single_track_ms_score(midi_file)
139
+
140
+ escore_notes = TMIDIX.advanced_score_processor(raw_score, return_enhanced_score_notes=True)[0]
141
+
142
+ escore_notes = TMIDIX.augment_enhanced_score_notes(escore_notes, timings_divider=256)
143
+
144
+ sp_escore_notes = TMIDIX.recalculate_score_timings(TMIDIX.solo_piano_escore_notes(escore_notes, keep_drums=False))
145
+
146
+ if sp_escore_notes:
147
+
148
+ bmatrix = TMIDIX.escore_notes_to_binary_matrix(sp_escore_notes)
149
+
150
+ return [fn1, bmatrix]
151
+
152
+ else:
153
+ return [fn1, []]
154
+
155
+ #=======================================================
156
+
157
+ except Exception as ex:
158
+ print('WARNING !!!')
159
+ print('=' * 70)
160
+ print('Bad MIDI:', midi_file)
161
+ print('Error detected:', ex)
162
+ print('=' * 70)
163
+ return None
164
+
165
+ print('Done!')
166
+ print('=' * 70)
167
+
168
+ """# (PROCESS MIDIs)"""
169
+
170
+ #@title Process MIDIs with TMIDIX MIDI processor
171
+ output_folder = "/content/MIDI-Images/" # @param {"type":"string"}
172
+
173
+ NUMBER_OF_PARALLEL_JOBS = 4 # Number of parallel jobs
174
+ NUMBER_OF_FILES_PER_ITERATION = 4 # Number of files to queue for each parallel iteration
175
+ SAVE_EVERY_NUMBER_OF_ITERATIONS = 128 # Save every 512 files (files per iteration x iterations)
176
+
177
+ print('=' * 70)
178
+ print('TMIDIX MIDI Processor')
179
+ print('=' * 70)
180
+ print('Starting up...')
181
+ print('=' * 70)
182
+
183
+ ###########
184
+
185
+ melody_chords_f = []
186
+
187
+ files_count = 0
188
+
189
+ print('Processing MIDI files...')
190
+ print('Please wait...')
191
+ print('=' * 70)
192
+
193
+ for i in tqdm(range(0, len(filez), NUMBER_OF_FILES_PER_ITERATION)):
194
+
195
+ with parallel_config(backend='threading', n_jobs=NUMBER_OF_PARALLEL_JOBS, verbose = 0):
196
+
197
+ output = Parallel(n_jobs=NUMBER_OF_PARALLEL_JOBS, verbose=0)(delayed(TMIDIX_MIDI_Processor)(f) for f in filez[i:i+NUMBER_OF_FILES_PER_ITERATION])
198
+
199
+ for o in output:
200
+
201
+ if o is not None:
202
+ melody_chords_f.append(o)
203
+
204
+ if i % (NUMBER_OF_FILES_PER_ITERATION * SAVE_EVERY_NUMBER_OF_ITERATIONS) == 0 and i != 0:
205
+
206
+ print('SAVING !!!')
207
+ print('=' * 70)
208
+ print('Saving processed files...')
209
+ files_count += len(melody_chords_f)
210
+ print('=' * 70)
211
+ print('Processed so far:', files_count, 'out of', len(filez), '===', files_count / len(filez), 'good files ratio')
212
+ print('=' * 70)
213
+ print('Writing images...')
214
+ print('Please wait...')
215
+
216
+ for mat in melody_chords_f:
217
+
218
+ if mat[1]:
219
+
220
+ TPLOTS.binary_matrix_to_images(mat[1],
221
+ 128,
222
+ 32,
223
+ output_folder=output_folder+str(mat[0])+'/',
224
+ output_img_prefix=str(mat[0]),
225
+ output_img_ext='.png',
226
+ verbose=False
227
+ )
228
+
229
+ print('Done!')
230
+ print('=' * 70)
231
+ melody_chords_f = []
232
+
233
+ print('SAVING !!!')
234
+ print('=' * 70)
235
+ print('Saving processed files...')
236
+ files_count += len(melody_chords_f)
237
+ print('=' * 70)
238
+ print('Processed so far:', files_count, 'out of', len(filez), '===', files_count / len(filez), 'good files ratio')
239
+ print('=' * 70)
240
+ print('Writing images...')
241
+ print('Please wait...')
242
+
243
+ for mat in melody_chords_f:
244
+
245
+ if mat[1]:
246
+
247
+ TPLOTS.binary_matrix_to_images(mat[1],
248
+ 128,
249
+ 32,
250
+ output_folder=output_folder+str(mat[0])+'/',
251
+ output_img_prefix=str(mat[0]),
252
+ output_img_ext='.png',
253
+ verbose=False
254
+ )
255
+
256
+ print('Done!')
257
+ print('=' * 70)
258
+
259
+ """# (LOAD IMAGES)"""
260
+
261
+ #@title Load created MIDI images
262
+ full_path_to_metadata_pickle_files = "/content/MIDI-Images" #@param {type:"string"}
263
+
264
+ print('=' * 70)
265
+ print('MIDI Images Reader')
266
+ print('=' * 70)
267
+ print('Searching for images...')
268
+
269
+ filez = list()
270
+ for (dirpath, dirnames, filenames) in os.walk(full_path_to_metadata_pickle_files):
271
+ filez += [os.path.join(dirpath, file) for file in filenames if file.endswith('.png')]
272
+ print('=' * 70)
273
+
274
+ filez.sort()
275
+
276
+ print('Found', len(filez), 'images!')
277
+ print('=' * 70)
278
+ print('Reading images...')
279
+ print('Please wait...')
280
+ print('=' * 70)
281
+
282
+ fidx = 0
283
+
284
+ all_read_images = []
285
+
286
+ for img in tqdm(filez):
287
+
288
+ img = Image.open(img)
289
+
290
+ img_arr = np.array(img).tolist()
291
+
292
+ all_read_images.append(img_arr)
293
+
294
+ fidx += 1
295
+
296
+ print('Done!')
297
+ print('=' * 70)
298
+ print('Loaded', fidx, 'images!')
299
+ print('=' * 70)
300
+ print('Done!')
301
+ print('=' * 70)
302
+
303
+ """# (TEST IMAGES)"""
304
+
305
+ # @title Test created MIDI images
306
+
307
+ print('=' * 70)
308
+
309
+ image = random.choice(all_read_images)
310
+
311
+ escore = TMIDIX.binary_matrix_to_original_escore_notes(image)
312
+
313
+ output_score, patches, overflow_patches = TMIDIX.patch_enhanced_score_notes(escore)
314
+
315
+ detailed_stats = TMIDIX.Tegridy_ms_SONG_to_MIDI_Converter(output_score,
316
+ output_signature = 'MIDI Images',
317
+ output_file_name = '/content/MIDI-Images-Composition',
318
+ track_name='Project Los Angeles',
319
+ list_of_MIDI_patches=patches,
320
+ timings_multiplier=256
321
+ )
322
+
323
+ print('=' * 70)
324
+
325
+ """# (ZIP IMAGES)"""
326
+
327
+ # @title Zip created MIDI images
328
+ !zip -9 -r POP909_MIDI_Images_128_128_32_BW.zip MIDI-Images/ > /dev/null
329
+
330
+ """# Congrats! You did it! :)"""