Commit 8f28cac0 authored by Anthony Larcher

cleaning

parent be028f64

@@ -482,13 +482,12 @@ class FeaturesServer(object):
                                           label=label,
                                           start=start, stop=stop,
                                           global_cmvn=self.global_cmvn)
         # Post-process the features and return the features and vad label
         if global_cmvn:
             feat, label = self.post_processing(feat, label, global_mean, global_std)
         else:
             feat, label = self.post_processing(feat, label)
-        if mask is not None:
+        if self.mask is not None:
             feat = feat[:, self.mask]
         return feat, label
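
For reference, a minimal sketch of what the feature mask does, assuming self.mask holds a list of column indices to keep (the array names below are made up for the example):

import numpy

feat = numpy.arange(20, dtype=numpy.float32).reshape(5, 4)   # 5 frames, 4 coefficients
mask = [0, 2]                                                 # keep the 1st and 3rd columns
masked = feat[:, mask]                                        # mirrors feat = feat[:, self.mask]
print(masked.shape)                                           # (5, 2)
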
@@ -586,7 +586,6 @@ def read_hdf5_segment(file_handler,
         stop = dataset_length
     pad_end = stop - dataset_length if stop > dataset_length else 0
     stop = min(stop, dataset_length)
     global_cmvn = global_cmvn and not (start is None or stop is None)
     # Get the data between start and stop
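
As a side note, the clamping above keeps stop inside the dataset and records in pad_end how many requested frames fall beyond its end; a tiny sketch with hypothetical values:

dataset_length = 100
start, stop = 90, 110                                        # request overruns the dataset
pad_end = stop - dataset_length if stop > dataset_length else 0
stop = min(stop, dataset_length)
print(start, stop, pad_end)                                  # 90 100 10
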
@@ -601,7 +600,10 @@ def read_hdf5_segment(file_handler,
         if "/".join((show, data_id)) in h5f:
             dataset_id = show + '/{}'.format(data_id)
             if compression == 'none':
-                feat.append(_read_segment(h5f, dataset_id, start, stop))
+                data = _read_segment(h5f, dataset_id, start, stop)
+                if data.ndim == 1:
+                    data = data[:, numpy.newaxis]
+                feat.append(data)
             elif compression == 'htk':
                 feat.append(_read_segment_htk(h5f, dataset_id, start, stop))
             else:
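
The added lines promote 1-D datasets (a per-frame energy track, for instance) to a single-column 2-D array, presumably so they can be stacked with the other feature streams; a minimal illustration outside of HDF5:

import numpy

energy = numpy.random.rand(50)                 # 1-D, shape (50,)
if energy.ndim == 1:
    energy = energy[:, numpy.newaxis]          # now shape (50, 1)
cep = numpy.random.rand(50, 13)
combined = numpy.column_stack((energy, cep))   # shape (50, 14)
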
@@ -995,8 +997,9 @@ def _read_dataset(h5f, dataset_id):
         data = data[:, numpy.newaxis]
     return data

-def _read_segment(h5f, dataset_id, e, s):
-    return h5f[dataset_id][s:e, :]
+def _read_segment(h5f, dataset_id, s, e):
+    data = h5f[dataset_id][s:e]
+    return data

 def _read_dataset_htk(h5f, dataset_id):
     (A, B) = h5f[dataset_id + "comp"].value
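
The rewritten _read_segment takes its bounds in (start, stop) order and slices the dataset directly; a quick standalone check (file and dataset names are invented for the example):

import h5py
import numpy

with h5py.File('example.h5', 'w') as h5f:                    # throwaway file
    h5f.create_dataset('show/cep', data=numpy.random.rand(100, 13))
with h5py.File('example.h5', 'r') as h5f:
    segment = h5f['show/cep'][10:20]                         # same slicing as _read_segment
    print(segment.shape)                                     # (10, 13)
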
@@ -423,8 +423,6 @@ class FForwardNetwork():
             features, _ = dnn_features_server.load(seg)
             stat_features, labels = features_server.load(seg)
-            #s0 = self.forward(torch.from_numpy(
-            #    dnn_features_server.get_context(feat=features)[0]).type(torch.FloatTensor).to(device))[labels]
             s0 = self.forward(torch.from_numpy(
                 dnn_features_server.get_context(feat=features)[0][labels]).type(torch.FloatTensor).to(device))
             stat_features = stat_features[labels, :]
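
Indexing the labelled frames before the forward pass, as in the kept line, returns the same rows as indexing the output afterwards whenever the network maps each context window independently, and it avoids computing frames that are then discarded; a toy check under that assumption (the Linear layer stands in for the real model):

import torch

net = torch.nn.Linear(10, 4)                   # stand-in for the feed-forward model
windows = torch.rand(100, 10)                  # one context window per frame
labels = torch.zeros(100, dtype=torch.bool)
labels[::3] = True                             # hypothetical frame selection
out_after = net(windows)[labels]               # old form: index the output
out_before = net(windows[labels])              # kept form: index the input
print(torch.allclose(out_after, out_before))   # True
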
@@ -473,7 +471,6 @@ class FForwardNetwork():
         """
         model.cpu()
         for idx in seg_indices:
-            print("Compute statistics for {}".format(segset[idx]))
             logging.debug('Compute statistics for {}'.format(segset[idx]))
             show = segset[idx]
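
Since the print call is dropped in favour of logging.debug, the progress messages only appear once the logging level is set low enough, for instance:

import logging

logging.basicConfig(level=logging.DEBUG)                    # surface debug-level messages
logging.debug('Compute statistics for %s', 'some_show')     # hypothetical show name
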
@@ -520,11 +517,10 @@ class FForwardNetwork():
         ss.stat1 = numpy.zeros((idmap.leftids.shape[0], ndim * feature_size), dtype=numpy.float32)
         self.model.cpu()
-        for idx in numpy.arange(len(idmap.segset)):
-            print("Compute statistics for {}".format(idmap.segset[idx]))
-            logging.debug('Compute statistics for {}'.format(idmap.segset[idx]))
+        for idx in numpy.arange(len(idmap.rightids)):
+            logging.debug('Compute statistics for {}'.format(idmap.rightids[idx]))
-            show = idmap.segset[idx]
+            show = idmap.rightids[idx]
             channel = 0
             if features_server.features_extractor is not None \
                     and show.endswith(features_server.double_channel_extension[1]):

@@ -533,8 +529,10 @@ class FForwardNetwork():
             features, _ = dnn_features_server.load(show, channel=channel)
             stat_features = stat_features[labels, :]
-            s0 = self.model(torch.from_numpy(dnn_features_server.get_context(feat=features)[0]).type(torch.FloatTensor).cpu())[labels]
-            s0.cpu().data.numpy()
+            s0 = self.model(torch.from_numpy(
+                dnn_features_server.get_context(feat=features)[0][labels]).type(torch.FloatTensor).cpu())
+            s0 = s0.cpu().data.numpy()
             s1 = numpy.dot(stat_features.T, s0).T
             ss.stat0[idx, :] = s0.sum(axis=0)
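
For readers following the accumulation at the end of this hunk: with s0 the frame-wise network outputs and stat_features the acoustic frames, the zero-order statistics are the column sums of s0 and the first-order statistics are the output-weighted sums of the features; a shape-only sketch with random data (sizes are arbitrary):

import numpy

n_frames, n_outputs, feat_dim = 200, 64, 39
s0 = numpy.random.rand(n_frames, n_outputs)             # network outputs per frame
stat_features = numpy.random.rand(n_frames, feat_dim)   # features per frame
stat0 = s0.sum(axis=0)                                  # zero-order stats, shape (64,)
s1 = numpy.dot(stat_features.T, s0).T                   # first-order stats, shape (64, 39)
print(stat0.shape, s1.shape)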