-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathTest.py
70 lines (52 loc) · 2.5 KB
/
Test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import torch
from LoadNoise import LoadData
import numpy as np
from torch.utils.data import DataLoader
import argparse
import os
# Command-line interface for the spectrogram data-generation script.
parser = argparse.ArgumentParser(
    description='Create clean/noisy magnitude-spectrogram training data.')
parser.add_argument('--data_path', type=str, default='TIMIT/TRAIN',
                    help="path for the data")
# Fixed: help text was copy-pasted from --data_path ("path for the data").
parser.add_argument('--noise_type', type=str, default='babble',
                    help="noise type to mix into the clean speech")
# NOTES (from the original author, lightly edited for clarity):
# - For the first stage of meta-learning with one noise type and different
#   SNRs, the training file is the same for all tasks; just create different
#   dataloaders with the same noise type and different SNRs.
# - For regular training with one noise type and different SNRs, the training
#   file has all the files with noise added at -6, -3, 0, 3, 6 dB.
# - Testing files are the same for both, since we only need a new dataloader
#   and to specify what noise type and SNR to test at.
# - meta_training has multiple noise types stacked, so the last dimension is 5;
#   regular training with only one noise type has last dimension 1.
# - The SNR input needs only one value to train/test at a single SNR.
# - tsv_file is the meta_data file when adding just one noise type; the
#   tsv_file for regular training has all noise types together. The same
#   tsv_file works either way, since we tell it what noise/SNR to add.
args = parser.parse_args()
data_path = args.data_path
noise_type = args.noise_type
# SNR levels (dB) at which noise is mixed; one saved spectrogram stack per level.
noise_snr = [-6, -3, 0, 3, 6]
# Dataset of paired clean/noisy magnitude spectrograms.
# NOTE(review): LoadData is project-local (LoadNoise); the loop below shows it
# yields dicts with 'clean_mag' and 'noise_mag' tensors whose last axis
# indexes the SNR levels — confirm against LoadNoise.py.
meta_training_data = LoadData(tsv_file='dataset/meta_data/train/train.txt',
                              clean_dir=data_path, SNR=noise_snr,
                              noise=noise_type)
# batch_size=4610 is presumably the full dataset size, so the single batch
# taken below contains every example — TODO confirm against train.txt.
meta_train_loader = DataLoader(meta_training_data, batch_size=4610,
                               shuffle=True, num_workers=0)
# Output directories; exist_ok=True avoids the check-then-create race of the
# old `if not os.path.exists(...): os.makedirs(...)` pattern.
path1_name = './spectograms_train30/noise/' + noise_type + '/train'
os.makedirs(path1_name, exist_ok=True)
path2_name = './spectograms_train30/clean/train'
os.makedirs(path2_name, exist_ok=True)
# Pull exactly one (full-dataset) batch; the DataLoader shuffles internally.
print('creating data....')
batch = next(iter(meta_train_loader))
clean = batch['clean_mag']
noise = batch['noise_mag']
print(clean.shape)
print(noise.shape)
print('done...')
print(clean.shape)
# Reuse the directory variables instead of re-spelling the paths (the original
# duplicated the literals, inviting drift between makedirs and np.save).
np.save(path2_name + '/clean_frames_' + noise_type + '.npy', clean)
# One noisy stack per SNR level; the last axis of `noise` indexes SNR.
for s, snr in enumerate(noise_snr):
    print(snr)
    print(noise[:, :, :, :, s].shape)
    np.save(path1_name + '/noise_' + str(snr) + '.npy', noise[:, :, :, :, s])