Project: Speaker / sidekit
Commit: 3096a152, authored May 16, 2022 by Colleen Beaumard
Parent: 75fdd313

Scheduler losses now are plotted

Changes: 1 file changed, egs/iemocap/local/scoring.py
@@ -8,6 +8,7 @@ import numpy as np
 import torchaudio
 import sklearn.metrics as metrics
 import matplotlib.pyplot as plt
+from matplotlib.ticker import FormatStrFormatter
 from sidekit.nnet.xvector import Xtractor
 import argparse
@@ -158,7 +159,7 @@ fil.close()
 # Search for "Loss:", "Validation Loss", "Epoch" and "reducing" in all lines
 valid_loss = [line for line in file if "Validation Loss" in line]
 lr_scheduler = [float(line.rsplit(":", 1)[1].replace("\n", "")) for line in file if "Scheduer" in line]
 # "Loss:" and "Epoch" in same line
 if model_type == "custom":
     loss_epoch = [line for line in file if "Epoch" in line]
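The scheduler learning rate is pulled out of the training log by the lr_scheduler list comprehension in the hunk above. As a minimal sketch of that parsing, assuming hypothetical log lines of the form "Scheduer: <value>" (the exact log format is an assumption; only the "Scheduer" keyword and the ":"-based split come from the script):

# Minimal parsing sketch; sample_lines is made-up log content.
sample_lines = [
    "Epoch: 3\tLoss: 1.234\n",   # hypothetical epoch/loss line, ignored here
    "Scheduer: 1.0e-04\n",       # hypothetical scheduler line
    "Scheduer: 5.0e-05\n",
]
lr_values = [float(line.rsplit(":", 1)[1].replace("\n", ""))
             for line in sample_lines if "Scheduer" in line]
print(lr_values)  # [0.0001, 5e-05]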
@@ -183,6 +184,7 @@ for linev, linel in zip(valid_loss, loss_epoch):
     vacc.append(round(float(linev[1].split("%")[0].replace(" ", "")), 2))
     aepoch.append(linel[3].split(" ")[1])
     tloss.append(round(float(linel[4].split("\t")[0].replace(" ", "")), 2))
 assert len(aepoch) == len(tloss) == len(vloss) == len(vacc)
 print("--------------\n")
@@ -203,7 +205,7 @@ if not os.path.isdir(path.rsplit("/", 1)[0]):
 if not os.path.isdir(path):
     os.mkdir(path)
-# Plot confusion matrice #
+# Plot confusion matrice ##
 sns.heatmap(confMatrix, annot=annot, fmt="10", cmap="Blues", vmin=0, vmax=350, xticklabels=labels, yticklabels=labels)
 plt.title("Model: {}{}{}_".format(model_type, model_2nd, freeze) + str(config["speaker_number"]) + "emo_{}batch\nepoch: {} lr: {} Data: Test-IEMOCAP {}".format(nb_batch, aepoch[-1], lr, ses_nb) + " UAR = " + str(UARPercent) + "%")
 plt.xlabel("Prediction")
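For context, the sns.heatmap call in the hunk above draws the confusion matrix with per-cell counts. A self-contained sketch of that call with made-up data (the four emotion labels and the counts are invented, and annot=True / fmt="d" stands in for the script's precomputed annot array and fmt="10"):

import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

# Hypothetical 4-class confusion matrix; labels and counts are made up.
labels = ["ang", "hap", "neu", "sad"]
confMatrix = np.array([[120, 10, 15, 5],
                       [8, 130, 20, 12],
                       [14, 18, 140, 9],
                       [6, 11, 10, 125]])

sns.heatmap(confMatrix, annot=True, fmt="d", cmap="Blues",
            vmin=0, vmax=350, xticklabels=labels, yticklabels=labels)
plt.xlabel("Prediction")
plt.ylabel("Ground truth")
plt.show()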
@@ -211,26 +213,21 @@ plt.ylabel("Ground truth")
 plt.savefig(os.path.join(path, "confusion_matrix_{}{}{}_".format(model_type, model_2nd, freeze) + str(config["speaker_number"]) + "emo_{}batch_epoch-{}_lr-{}_Test-IEMOCAP{}.png".format(nb_batch, aepoch[-1], lr, ses_nb)))
 plt.show()
 plt.clf()
-print("\nConfusion matrix done!")
+print("\nConfusion matrix plotted!")
-# Plot losses
-fig, ax = plt.subplots()
-twin = ax.twinx()  # Add a 3rd axis on the right
+## Plot losses ##
+fig, axs = plt.subplots(2, 1, constrained_layout=True)
+ax = axs[0]
+twin = ax.twinx()
+ax2 = axs[1]
+# 1st subplot #
 plot_tl, = ax.plot(tloss, label="Training loss")
 plot_vl, = ax.plot(vloss, label="Validation loss")
 plot_vac, = twin.plot(vacc, "g-.", label="Validation accuracy")
-handles = [plot_tl, plot_vl, plot_vac]  # For the legend
+handles = [plot_tl, plot_vl, plot_vac]
-ax.set_xlabel("Epochs")
-ax.set_ylabel("CrossEntropyLoss()")
-twin.set_ylabel("Validation accuracy (%)")
-twin.set_ylim(0, 100)
-ax.set_ylim(0, 9)
-ax.set_xlim(0, len(aepoch))
 # Change of learning rate during the training (can be disused)
 if len(reduce_lr_list) != 0:
     reduce_lr_list = [re.sub(r"\s+", " ", elmt) for elmt in reduce_lr_list]
     reduce_lr_list = [elmt.split(" ") for elmt in reduce_lr_list]
@@ -243,8 +240,32 @@ if len(reduce_lr_list) != 0:
         handles.append(plt.axvline(x=key, color=colors[i], linestyle='--', label=label))
         i += 1
+# 2nd subplot #
+plot_lr = ax2.plot(lr_scheduler, "m")
+maxlr, minlr = max(lr_scheduler), min(lr_scheduler)  # Used for legend
+maxlr_index, minlr_index = lr_scheduler.index(maxlr), lr_scheduler.index(minlr)  # Used for axvline
+lr_max = ax2.axvline(x=maxlr_index, color="r", linestyle='--', label="Max: {}".format(format((maxlr), ".1e")))
+lr_min = ax2.axvline(x=minlr_index, color="b", linestyle='--', label="Min: {}".format(format((minlr), ".1e")))
+handles_lr = [lr_max, lr_min]  # For the legend
+ax2.legend(handles=handles_lr)  # Show the legend
+# General settings #
+# 1st subplot #
+ax.set_xlabel("Epochs")
+ax.set_ylabel("CrossEntropyLoss()")
+twin.set_ylabel("Validation accuracy (%)")
+twin.set_ylim(0, 100)
+ax.set_ylim(0, 9)
+ax.set_xlim(0, len(aepoch))
 ax.legend(handles=handles)
 #plt.gca().xaxis.set_major_locator(matplotlib.ticker.MaxNLocator(20))
+# 2nd subplot #
+ax2.set_title("Scheduler learning rate")
+ax2.get_xaxis().set_visible(False)  # Do not show the x axis
+ax2.yaxis.set_major_formatter(FormatStrFormatter('%.e'))  # Format of the y ticks
+# Figure #
 plt.title("Model: {}{}{}_{}emo_{}batch\nEpoch: {} lr: {} Data: Test-IEMOCAP {}".format(model_type, model_2nd, freeze, cates, nb_batch, aepoch[-1], lr, ses_nb))
 plt.savefig(os.path.join(path, "losses_{}{}{}_{}emo_{}batch_epoch-{}_lr-{}_Test-IEMOCAP{}.png".format(model_type, model_2nd, freeze, cates, nb_batch, aepoch[-1], lr, ses_nb)))
 plt.show()
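The restructured plotting code stacks two subplots: the loss and accuracy curves on top, with a twinx axis for accuracy, and the scheduler learning rate below with scientific-notation ticks. A runnable sketch of that layout with dummy values (all numbers are invented; only the matplotlib calls mirror the diff):

import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter

# Dummy values standing in for tloss, vloss, vacc and lr_scheduler.
tloss = [2.1, 1.4, 1.0, 0.8]
vloss = [2.3, 1.6, 1.3, 1.2]
vacc = [35.0, 48.0, 55.0, 58.0]
lr_scheduler = [1e-3, 1e-3, 5e-4, 1e-4]

fig, axs = plt.subplots(2, 1, constrained_layout=True)
ax, ax2 = axs[0], axs[1]
twin = ax.twinx()  # second y-axis on the first subplot for accuracy

plot_tl, = ax.plot(tloss, label="Training loss")
plot_vl, = ax.plot(vloss, label="Validation loss")
plot_vac, = twin.plot(vacc, "g-.", label="Validation accuracy")
ax.set_xlabel("Epochs")
ax.set_ylabel("CrossEntropyLoss()")
twin.set_ylabel("Validation accuracy (%)")
ax.legend(handles=[plot_tl, plot_vl, plot_vac])

ax2.plot(lr_scheduler, "m")
ax2.set_title("Scheduler learning rate")
ax2.yaxis.set_major_formatter(FormatStrFormatter('%.e'))  # scientific-notation ticks
plt.show()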