Commit 817431b6 authored by Anthony Larcher's avatar Anthony Larcher
Browse files

Add a BatchNorm1d layer before the speaker embedding

parent 620cedb7
......@@ -511,8 +511,15 @@ class Xtractor(torch.nn.Module):
n_mels=80)
self.sequence_network = PreHalfResNet34()
self.embedding_size = 256
self.before_speaker_embedding = torch.nn.Linear(in_features = 5120,
out_features = self.embedding_size)
self.before_speaker_embedding = torch.nn.Sequential(OrderedDict([
("lin_be", torch.nn.Linear(in_features = 5120,
out_features = self.embedding_size)),
("bn_be", torch.nn.BatchNorm1d(self.embedding_size))
]))
#self.before_speaker_embedding = torch.nn.Linear(in_features = 5120,
# out_features = self.embedding_size)
self.stat_pooling = AttentivePooling(256, 80, global_context=True)
self.loss = loss
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment