Commit 75eec6f5, authored Mar 23, 2020 by Enrico Pozzobon
    fixes for tests, .s to .S, other stuff

Parent: 67bcb93e
Showing 16 changed files with 491 additions and 308 deletions (+491 / -308)
    compile_all.py                 +31   -36
    index.html                     +27   -15
    parse_logic.py                  +0  -123
    process_zip.sh                 +32    -6
    templates/bluepill/configure    +8    -1
    templates/bluepill/test         +5    -0
    templates/esp32/configure       +7    -1
    templates/esp32/test           +20   -19
    templates/f7/configure          +6    -0
    templates/f7/test               +3    -0
    templates/maixduino/configure   +7    -1
    templates/maixduino/test        +3    -0
    templates/uno/configure         +7    -1
    templates/uno/test             +12    -0
    test-dude.py                  +132   -75
    test_common.py                +191   -30
compile_all.py

@@ -172,10 +172,17 @@ def main(argv):
             if n not in include_list:
                 continue
+        # Find date of last modification in directory
+        st_mtime = 0
+        for root, dirs, filess in os.walk(d):
+            for name in filess:
+                path = os.path.join(root, name)
+                st_mtime = max(st_mtime, os.stat(path).st_mtime)
         # Put all in a tuple and count
-        files.append((t, d, n))
+        files.append((t, d, n, st_mtime))
-    # For testing, we only do the first 1
+    # Uncomment next line for testing, if we only want to do 1
     # files = files[:1]
     print("%d algorithms will be compiled" % len(files))
...

@@ -184,41 +191,29 @@ def main(argv):
     print()
-    # Write a script that executes all the tests one after the other
-    test_script_path = os.path.join(build_root_dir, "test_all.sh")
-    with open(test_script_path, 'w') as test_script:
-        test_script.write("#!/bin/sh\n")
-        test_script.write("mkdir -p logs\n")
-        test_script.write("mkdir -p measurements\n")
-        for i, (t, d, name) in enumerate(files):
-            print()
-            print(d)
-            try:
-                build_dir = os.path.join(build_root_dir, name)
-                b = build(d, template_dir, build_dir)
-                if b is None:
-                    continue
-                test_script.write("\n\necho \"TEST NUMBER %03d: TESTING %s\"\n" % (i, d))
-                test_script.write("python3 -u %s %s %s 2> %s | tee %s\n" % (
-                    os.path.join(template_dir, 'test'),
-                    os.path.join(b, 'LWC_AEAD_KAT.txt'),
-                    b,
-                    os.path.join(b, 'test_stderr.log'),
-                    os.path.join(b, 'test_stdout.log')))
-                shutil.copyfile(t, os.path.join(b, 'LWC_AEAD_KAT.txt'))
-                print("COMPILATION SUCCESS FOR %s" % d)
-            except Exception as ex:
-                print("COMPILATION FAILED FOR %s" % d)
-                print(ex)
-    st = os.stat(test_script_path)
-    os.chmod(test_script_path, st.st_mode | stat.S_IEXEC)
+    # Build all found algorithms
+    for i, (t, d, name, st_mtime) in enumerate(files):
+        print()
+        print(d)
+        try:
+            build_dir = os.path.join(build_root_dir, name)
+            b = build(d, template_dir, build_dir)
+            if b is None:
+                continue
+            shutil.copyfile(t, os.path.join(b, 'LWC_AEAD_KAT.txt'))
+            mdate_path = os.path.join(build_dir, 'cipher_mtime.txt')
+            with open(mdate_path, 'wt') as mdate_file:
+                print(int(st_mtime), file=mdate_file)
+            print("COMPILATION SUCCESS FOR %s" % d)
+        except Exception as ex:
+            print("COMPILATION FAILED FOR %s" % d)
+            print(ex)
+    print()
+    print()
...
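compile_all.py now writes the newest source mtime of each cipher into cipher_mtime.txt inside the build directory, which process_zip.sh later reads back with cat. A minimal sketch of reading that file from Python, assuming an illustrative build directory name:

    # Minimal sketch; 'build/example-cipher' is a hypothetical path.
    # compile_all.py stores the newest source-file mtime as an integer epoch
    # timestamp in <build_dir>/cipher_mtime.txt.
    import os
    import datetime

    def read_cipher_mtime(build_dir):
        with open(os.path.join(build_dir, 'cipher_mtime.txt'), 'rt') as f:
            return int(f.readline())

    if __name__ == '__main__':
        ts = read_cipher_mtime('build/example-cipher')
        print("cipher sources last touched:", datetime.datetime.fromtimestamp(ts))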
index.html

@@ -18,6 +18,10 @@ td {
     margin: 0px;
 }
+td.schedule-path, th.schedule-path {
+    width: 20em;
+}
 .iconButton {
     height: 2.2em;
     width: 2.2em;
...

@@ -86,7 +90,7 @@ let menuView = null;
 function init() {
     scheduleTable.style.display = 'none';
     document.body.appendChild(scheduleTable);
-    scheduleTable.appendChild(makeRow(['ID', 'Created At', 'Path', 'Template', 'State', 'Actions'], 'th'));
+    scheduleTable.appendChild(makeScheduleRow(['ID', 'Created At', 'Path', 'Template', 'State', 'Actions'], 'th'));
     document.addEventListener("keydown", function(event) {
         if (event.which == 27) {
...

@@ -132,7 +136,7 @@ function onStatusGet(status) {
     // Add rows for new incoming tasks
     for (const i of newTasksIds) {
-        const row = makeRow([i, '', '', '', '', '']);
+        const row = makeScheduleRow([i, '', '', '', '', '']);
         const s = { id: i, row };
         schedule[s.id] = s;
...

@@ -168,7 +172,8 @@ function onStatusGet(status) {
         }
         const row = schedule[s.id].row;
-        row.cells[1].innerText = s.added;
+        row.cells[1].innerText = s.added.substr(0, 16) + '\n' + s.added.substr(17);
         row.cells[2].innerText = s.path;
         row.cells[3].innerText = s.template;
         row.cells[4].innerText = s.state;
...

@@ -214,7 +219,7 @@ function showJobMenu(event, jobId) {
     menuView.appendChild(makeEntry('🗎 View Logs', () => viewJobLogs(jobId)));
     if (st == 'SUCCESSFUL') {
         menuView.appendChild(makeEntry('↓ Download results zip', () => getResultZip(jobId)));
-        menuView.appendChild(makeEntry('↓ Download results sql', () => getResultSql(jobId)));
+        menuView.appendChild(makeEntry('↓ Download results JSON', () => getResultJSON(jobId)));
     }
     function onLayout() {
...

@@ -222,15 +227,17 @@ function showJobMenu(event, jobId) {
         // we have the size of the menu view, now we have to place it close to
         // the bounding box of the button that was clicked, without putting it
         // ouside the screen:
+        const a = document.body.getBoundingClientRect();
         const b = menuView.getBoundingClientRect();
         const r = event.target.getBoundingClientRect();
         const vh = window.innerHeight || document.documentElement.clientHeight;
         const vw = window.innerWidth || document.documentElement.clientWidth;
         menuView.style.width = b.width + 'px';
         menuView.style.height = b.height + 'px';
-        menuView.style.top = Math.min(r.y, vh - b.height - 20) + 'px';
-        menuView.style.left = Math.min(r.x, vw - b.width - 20) + 'px';
+        menuView.style.top = (-a.top + Math.min(r.y, vh - b.height - 20)) + 'px';
+        menuView.style.left = (-a.left + Math.min(r.x, vw - b.width - 20)) + 'px';
     };
     setTimeout(() => {
...

@@ -274,6 +281,11 @@ function getResultZip(jobId) {
 }
+function getResultJSON(jobId) {
+    window.open('/results/' + jobId + '/results.json');
+}
 function viewJobLogs(jobId) {
     const logIds = [0, 1, 2, 3];
     const logNames = ['make stdout', 'make stderr', 'test stdout', 'test stderr'];
...

@@ -365,16 +377,16 @@ function requestStatus() {
 function makeCell(text, tagName='td') {
     const cell = document.createElement(tagName);
     cell.appendChild(document.createTextNode(text));
     return cell;
 };
-function makeRow(list, cellTagName='td') {
+function makeScheduleRow(list, cellTagName='td') {
     const headerRow = document.createElement('tr');
-    (list.map((n) => makeCell(n, cellTagName))).forEach((n) => headerRow.appendChild(n));
+    const classes = ['schedule-id', 'schedule-date', 'schedule-path',
+                     'schedule-template', 'schedule-state', 'schedule-actions'];
+    (list.map((text, idx) => {
+        const cell = document.createElement(cellTagName);
+        cell.className = classes[idx];
+        cell.appendChild(document.createTextNode(text));
+        return cell;
+    })).forEach((n) => headerRow.appendChild(n));
     return headerRow;
 };
...
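The updated row renderer splits s.added at character 16 so that the date and the time appear on two lines in the "Created At" column. A small sketch of that split; the example timestamp string is illustrative only, since the exact wire format depends on how test-dude.py serializes ScheduledTest.added:

    # Sketch of the two-line split applied to s.added in index.html
    # (assumption: the timestamp string below is a made-up example of the wire format).
    added = "Mon, 23 Mar 2020 14:05:12 GMT"

    # JavaScript: s.added.substr(0, 16) + '\n' + s.added.substr(17)
    two_lines = added[:16] + '\n' + added[17:]
    print(two_lines)
    # Mon, 23 Mar 2020
    # 14:05:12 GMT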
parse_logic.py (deleted, 100755 → 0; last version at 67bcb93e)

#!/usr/bin/python3

import os
import sys
import statistics
import matplotlib.pyplot as plt


def parse_capture(filename):
    f = open('measurements/' + filename)
    # Skip the first two false positves (risky)
    counter = -1
    lets_use_a_flag = False
    six_ts = ''
    two_ts = ''
    enc_deltas = {}
    dec_deltas = {}
    for l in f.readlines():
        l_array = l.split(" ")
        bit_field = l_array[-1][:-1]
        # l_array? That's the best you came up with?
        if bit_field == '2':
            two_ts = l_array[0][:-1]
            lets_use_a_flag = True
            continue
        if lets_use_a_flag:
            if bit_field == '6':
                if counter <= 0:
                    counter = counter + 1
                    lets_use_a_flag = False
                    continue
                six_ts = l_array[0][:-1]
                delta = float(six_ts) - float(two_ts)
                if counter % 2 == 1:
                    enc_deltas[(counter + 1) / 2] = delta
                else:
                    dec_deltas[counter / 2] = delta
                counter = counter + 1
            else:
                lets_use_a_flag = False
    f.close()
    return (enc_deltas, dec_deltas)


def read_log(d):
    # Soo readlines, much efficient
    f = open(d + '/test_stdout.log', 'r')
    content = f.readlines()
    are_we_happy = content[-1].split(' ')[-1]
    if are_we_happy != 'SUCCESSFUL\n':
        print("Test unsuccesful or log file structure corrupted")
        return
    # I like to split it, split it
    path = content[0].split(' ')[-1].split('/')
    if path[-2] == 'ref':
        algorithm = path[-3]
    else:
        algorithm = path[-2]
    # Path to logic data is in the second to last line
    logic_file = content[-2].split('/')[-1][:-2]
    f.close()
    print("Evaluating results for %s" % (algorithm))
    dicts = parse_capture(logic_file)
    return (algorithm, dicts)


def gen_graph_chunks(bp_data, bp_labels, size):
    l = len(bp_data)
    for i in range(0, (l // size) * size, size):
        chunk = bp_data[i:i + size]
        labels = bp_labels[i:i + size]
        plt.boxplot(chunk, labels=labels)
        plt.xticks(rotation=90)
        plt.show()
    # Let's also plot the leftover
    rest = l % size
    if rest != 0:
        plt.boxplot(bp_data[(rest - 2 * rest):])
        plt.show()


def main():
    print('THE LWC BENCHMARK SPLITTER')
    print('powered by Deutsche Bahn')
    build_dir = 'build/new/'
    bp_data = []
    bp_labels = []
    for d in os.listdir(build_dir):
        # dicts[0] --> algo
        # dicts[1][0] --> enc
        # dicts[1][1] --> dec
        dicts = read_log(os.path.join(build_dir + d))
        enc_values = dicts[1][0].values()
        dec_values = dicts[1][1].values()
        bp_data.append(list(enc_values))
        bp_labels.append(dicts[0])
        print("Average enc time[s] = %f" % (statistics.mean(enc_values)))
        print("Median enc time[s] = %f" % (statistics.median(enc_values)))
        print("Average dec time[s] = %f" % (statistics.mean(dec_values)))
        print("Median dec time[s] = %f" % (statistics.median(dec_values)))
        print()
        enc_len = len(dicts[1][0])
        dec_len = len(dicts[1][1])
        if dec_len != enc_len:
            raise Exception("#Encryptions (%d) does not match #decryptions (%d)" % (enc_len, dec_len))
        if dec_len != 1089 or enc_len != 1089:
            raise Exception("#Number of encrypted test vectors (%d)/ decrypted test vectors (%d) does not match guidelines (1089)" % (enc_len, dec_len))
    gen_graph_chunks(bp_data, bp_labels, 5)
    #plt.boxplot(bp_data, labels=bp_labels)
    #plt.xticks(rotation=90)
    #plt.show()


if __name__ == "__main__":
    main()
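The deleted parse_capture() paired each '2' edge with the following '6' edge and attributed the delta to encryption or decryption by the parity of a running counter. A toy sketch of that pairing logic, with made-up timestamps (the real script additionally skipped two leading false positives):

    # Toy illustration of the pairing logic from the deleted parse_capture()
    # (assumption: rows are (timestamp, bit_field) pairs; the values are invented).
    rows = [(0.010, '2'), (0.034, '6'),   # 1st pair -> encryption #1
            (0.040, '2'), (0.061, '6'),   # 2nd pair -> decryption #1
            (0.070, '2'), (0.096, '6')]   # 3rd pair -> encryption #2

    enc_deltas, dec_deltas = {}, {}
    counter = 1
    for (t_start, _), (t_end, _) in zip(rows[0::2], rows[1::2]):
        delta = t_end - t_start
        if counter % 2 == 1:
            enc_deltas[(counter + 1) / 2] = delta
        else:
            dec_deltas[counter / 2] = delta
        counter += 1
    print(enc_deltas, dec_deltas)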
process_zip.sh

@@ -30,17 +30,33 @@ function run() {
     mkdir -p $DESTDIR
     echo "Compiling for template '$TEMPLATE' in directory '$TMPDIR'"
-    ./compile_all.py -s $SUBMISSION -t "templates/$TEMPLATE" -b "$TMPDIR"
+    TEMPLATE_PATH="templates/$TEMPLATE"
+    TEMPLATE_COMMIT=$(git rev-list -1 HEAD -- "$TEMPLATE_PATH")
+    if [ -z "$TEMPLATE_COMMIT" ]; then
+        echo "Could not retrieve the git commit of the template"
+        exit 1
+    fi
+    TEMPLATE_TIMESTAMP=$(git show -s --format=%ct "$TEMPLATE_COMMIT")
+    if [ -z "$TEMPLATE_TIMESTAMP" ]; then
+        echo "Could not retrieve the git commit date of the template"
+        exit 1
+    fi
+    ./compile_all.py -s $SUBMISSION -t $TEMPLATE_PATH -b "$TMPDIR"

     for cipher in $TMPDIR/*; do
         if [[ ! -d $cipher ]]; then continue; fi
         mkdir -p "./queues"
         QUEUE_PATH="./queues/$TEMPLATE"
         CIPHER_SLUG=$(basename $cipher)
+        CIPHER_TIMESTAMP=$(cat "$cipher/cipher_mtime.txt")
         TEST_PATH="$DESTDIR/$CIPHER_SLUG"
         mkdir -p "$TEST_PATH" || exit 1
         TEST_PATH=$(realpath $TEST_PATH)
         mv $cipher/*.log "$TEST_PATH"
         mv "$cipher/LWC_AEAD_KAT.txt" "$TEST_PATH"
...

@@ -61,15 +77,24 @@ function run() {
             ;;
         esac
         curl \
             --request 'POST' \
             --header "Content-Type: application/json" \
-            --data "{\"path\": \"$(realpath $TEST_PATH)\", \"template\": \"$TEMPLATE\"}" \
+            --data "\
+            {\
+                \"build_dir\": \"$TEST_PATH\",\
+                \"cipher\": \"$CIPHER_SLUG\",\
+                \"cipher_timestamp\": \"$CIPHER_TIMESTAMP\",\
+                \"template\": \"$TEMPLATE\",\
+                \"template_commit\": \"$TEMPLATE_COMMIT\",\
+                \"template_timestamp\": \"$TEMPLATE_TIMESTAMP\"\
+            }" \
             "http://127.0.0.1:5002/schedule_test"
     done
-    rm -rf "$TMPDIR"
+    # rm -rf "$TMPDIR"
 }

 if [[ $1 == "run" ]]; then
...

@@ -84,6 +109,7 @@ else
     MAINDIR=email-submissions/$(date +%Y-%m-%d-%H:%M)
     mkdir -p $MAINDIR
     TMPDIR=$(mktemp -d -t submission-XXXXXXXXXX)
     echo "Extracting in $TMPDIR"
     unzip $ZIP_PATH -d $TMPDIR
     for i in templates/*; do
         TEMPLATE="${i##*/}"
...
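process_zip.sh now POSTs a richer JSON body to the scheduler's /schedule_test endpoint, matching the mandatory fields checked in test-dude.py. A standard-library sketch of the same request; every field value below is a placeholder, not a real path, slug, or commit:

    # Sketch of the /schedule_test request that process_zip.sh builds with curl
    # (assumption: all payload values are hypothetical placeholders).
    import json
    import urllib.request

    payload = {
        "build_dir": "/srv/tests/example-cipher",
        "cipher": "family.variant.ref",
        "cipher_timestamp": "1584748800",
        "template": "uno",
        "template_commit": "deadbeef",
        "template_timestamp": "1584967000",
    }
    req = urllib.request.Request(
        "http://127.0.0.1:5002/schedule_test",
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        print(resp.read().decode())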
templates/bluepill/configure

 #!/bin/bash
-mv -n *.c *.s *.S src/
+# Rename all *.s to *.S
+for f in *.s; do
+    mv -- "$f" "${f%.s}.S"
+done
+mv -n *.c *.S src/
 mv -n *.dat *.inc *.h include/
 sed -i src/encrypt.c -e "s/\(\s\)init(/\1encrypt_init(/g"
 exit 0
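The ".s to .S" part of the commit message is this rename loop, repeated across the template configure scripts: lowercase .s assembly is renamed to .S so the toolchain runs the C preprocessor over it before assembling. A Python equivalent of the shell loop, as a sketch:

    # Python equivalent of the configure scripts' rename loop
    # (the templates actually do this in bash with `mv -- "$f" "${f%.s}.S"`).
    import glob
    import os

    for f in glob.glob('*.s'):
        os.rename(f, f[:-2] + '.S')   # foo.s -> foo.S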
templates/bluepill/test

@@ -38,6 +38,8 @@ class BluePill(DeviceUnderTestAeadUARTP):
         self.firmware_path = os.path.join(build_dir, 'firmware.elf')
+        self.firmware_bin_path = os.path.join(build_dir, 'firmware.bin')
         self.ram_pattern_path = os.path.join(self.template_path, 'empty_ram.bin')
         self.ram_dump_path = os.path.join(
...

@@ -47,6 +49,9 @@ class BluePill(DeviceUnderTestAeadUARTP):
         self.ocd = OpenOcd(self.openocd_cfg_path)

+    def firmware_size(self):
+        return os.stat(self.firmware_bin_path).st_size

     def flash(self):
         ocd_cmd = 'program %s verify reset' % self.firmware_path
         res = self.ocd.send(ocd_cmd)
...
templates/esp32/configure

 #!/bin/bash
-mv -n *.c *.s *.S src/
+# Rename all *.s to *.S
+for f in *.s; do
+    mv -- "$f" "${f%.s}.S"
+done
+mv -n *.c *.S src/
 mv -n *.dat *.inc *.h include/
 sed -i src/encrypt.c -e "s/\(\s\)init(/\1_init(/g"
 exit 0
templates/esp32/test

@@ -35,23 +35,33 @@ class ESP32(DeviceUnderTestAeadUARTP):
         self.build_dir = build_dir
         self.template_path = os.path.dirname(sys.argv[0])
-        self.firmware_path = os.path.join(build_dir, 'firmware.elf')
+        self.pio_packages_path = "/home/tester/.platformio/packages/"
+        self.esptoolpy_path = os.path.join(self.pio_packages_path, "tool-esptoolpy/esptool.py")
+        self.partitionspath = os.path.join(build_dir, 'partitions.bin')
+        self.firmware_path = os.path.join(build_dir, 'firmware.bin')
+        # Convert elf to bin
+        cmd = ['python3', self.esptoolpy_path]
+        cmd += ['--chip', 'esp32']
+        cmd += ['elf2image', os.path.join(build_dir, 'firmware.elf')]
+        subprocess.check_call(cmd)
+
+    def firmware_size(self):
+        return os.stat(self.firmware_path).st_size

     def reset(self):
         self.ser.setDTR(False)  # IO0=HIGH
         self.ser.setRTS(True)   # EN=LOW, chip in reset
         time.sleep(0.1)
         self.ser.setDTR(False)  # IO0=HIGH
         self.ser.setRTS(False)  # EN=HIGH, chip out of reset
         time.sleep(1)

     def flash(self):
-        pio_packages_path = "/home/tester/.platformio/packages/"
-        esptoolpy_path = os.path.join(pio_packages_path, "tool-esptoolpy/esptool.py")
-        arduinoespressif32_path = os.path.join(pio_packages_path, "framework-arduinoespressif32/")
+        arduinoespressif32_path = os.path.join(self.pio_packages_path, "framework-arduinoespressif32/")
         boot_app0_path = os.path.join(arduinoespressif32_path, "tools/partitions/boot_app0.bin")
...

@@ -59,23 +69,14 @@ class ESP32(DeviceUnderTestAeadUARTP):
             arduinoespressif32_path, "tools/sdk/bin/bootloader_dio_80m.bin")
-        elfpath = os.path.abspath(self.firmware_path)
-        binpath = os.path.splitext(elfpath)[0] + '.bin'
-        partpath = os.path.join(*os.path.split(elfpath)[:-1], 'partitions.bin')
         partitions = [
             (0xe000, boot_app0_path),
             (0x1000, bootloader_path),
-            (0x10000, binpath),
-            (0x8000, partpath)
+            (0x10000, self.firmware_path),
+            (0x8000, self.partitionspath)
         ]
-        cmd = ['python3', esptoolpy_path]
-        cmd += ['--chip', 'esp32']
-        cmd += ['elf2image', elfpath]
-        subprocess.check_call(cmd)
-        cmd = ['python3', esptoolpy_path]
+        cmd = ['python3', self.esptoolpy_path]
         cmd += ['--chip', 'esp32']
         cmd += ['--before', 'default_reset', '--after', 'hard_reset']
         cmd += ['--port', self.uart_device]
...
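The ESP32 wrapper now does the ELF-to-BIN conversion once in the constructor (so firmware_size() can stat firmware.bin) instead of inside flash(). A standalone sketch of that conversion step; the build directory is a placeholder, and the esptool.py location is the PlatformIO path assumed by the template:

    # Standalone sketch of the elf2image step the ESP32 template now runs in __init__
    # (assumption: '/tmp/esp32-build' is a hypothetical build directory).
    import os
    import subprocess

    build_dir = '/tmp/esp32-build'
    esptoolpy = '/home/tester/.platformio/packages/tool-esptoolpy/esptool.py'

    cmd = ['python3', esptoolpy, '--chip', 'esp32',
           'elf2image', os.path.join(build_dir, 'firmware.elf')]
    subprocess.check_call(cmd)   # by default writes firmware.bin next to the ELF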
templates/f7/configure

 #!/bin/bash
+# Rename all *.s to *.S
+for f in *.s; do
+    mv -- "$f" "${f%.s}.S"
+done
 mv -n *.dat *.inc *.h Inc/
 sed -i src/encrypt.c -e "s/\(\s\)init(/\1_init(/g"
 mkdir -p /tmp/f7/Drivers
...
templates/f7/test

@@ -47,6 +47,9 @@ class F7(DeviceUnderTestAeadUARTP):
         self.ram_pattern_path = os.path.join(self.template_path, 'ram_pattern.bin')

+    def firmware_size(self):
+        return os.stat(self.firmware_path).st_size

     def flash(self):
         jlink = self.jlink
         jlink.connect('STM32F746ZG')
...
templates/maixduino/configure

 #!/bin/bash
-mv -n *.c *.s *.S src/
+# Rename all *.s to *.S
+for f in *.s; do
+    mv -- "$f" "${f%.s}.S"
+done
+mv -n *.c *.S src/
 mv -n *.dat *.inc *.h include/
 sed -i src/encrypt.c -e "s/\(\s\)init(/\1_init(/g"
 exit 0
templates/maixduino/test

@@ -39,6 +39,9 @@ class Maixduino(DeviceUnderTestAeadUARTP):
         self.firmware_path = os.path.join(build_dir, 'firmware.bin')

+    def firmware_size(self):
+        return os.stat(self.firmware_path).st_size

     def reset(self):
         if self.ser is not None:
             self.ser.close()
...
templates/uno/configure

 #!/bin/bash
-mv -n *.c *.s *.S src/
+# Rename all *.s to *.S
+for f in *.s; do
+    mv -- "$f" "${f%.s}.S"
+done
+mv -n *.c *.S src/
 mv -n *.dat *.inc *.h include/
 sed -i src/encrypt.c -e "s/\(\s\)init(/\1_init(/g"
 exit 0
templates/uno/test

@@ -3,6 +3,7 @@
 import os
 import sys
 import time
+import intelhex
 import subprocess
 import serial.tools.list_ports
 from test_common import (
...

@@ -38,6 +39,17 @@ class Uno(DeviceUnderTestAeadUARTP):
         self.firmware_path = os.path.join(build_dir, 'firmware.hex')
+        ih = intelhex.IntelHex()
+        ih.loadhex(self.firmware_path)
+        total_size = 0
+        for start, stop in ih.segments():
+            print(start, stop, stop - start)
+            total_size += stop - start
+        self._firmware_size = total_size
+
+    def firmware_size(self):
+        return self._firmware_size

     def reset(self):
         self.ser.setDTR(True)
         time.sleep(0.01)
...
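Because the Uno firmware is an Intel HEX image rather than a raw binary, its size is computed by summing the address ranges of the HEX segments. A minimal sketch using the same intelhex API, with a placeholder file name:

    # Sketch of the segment-summing size computation used for the Uno
    # (assumption: 'firmware.hex' is a placeholder path; requires the intelhex package).
    import intelhex

    ih = intelhex.IntelHex()
    ih.loadhex('firmware.hex')
    total = sum(stop - start for start, stop in ih.segments())
    print("flash bytes used:", total)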
test-dude.py

@@ -3,6 +3,7 @@
 import os
 import sys
+import time
 import signal
 import datetime
 import threading
...

@@ -10,6 +11,7 @@ import subprocess
 from flask import Flask, request, Response
 from flask_restful import Resource, Api
 from flask_jsonpify import jsonify
+from test_common import pack_results

 app = Flask(__name__, static_folder='.')
...

@@ -21,29 +23,51 @@ runners = []
 class ScheduledTest:
-    def __init__(self, template, path):
-        self.template = template
-        self.path = path
+    __slots__ = ['id', 'state', 'added', 'lock', 'time_started',
+                 'path', 'template', 'template_commit', 'template_timestamp',
+                 'cipher', 'cipher_timestamp']
+    _unserialized_slots = ['lock', ]
+    _next_id = 1
+
+    def __init__(self, **kwargs):
+        self.path = kwargs['build_dir']
+        self.cipher = kwargs['cipher']
+        self.cipher_timestamp = kwargs['cipher_timestamp']
+        self.template_timestamp = kwargs['template_timestamp']
+        self.template = kwargs['template']
+        self.template_commit = kwargs['template_commit']
+        self.id = str(ScheduledTest._next_id)
+        ScheduledTest._next_id += 1
         self.state = 'SCHEDULED'
         self.added = datetime.datetime.now()
         self.time_started = None
         self.lock = threading.Lock()

     def to_dict(self):
-        return {
-            'id': str(id(self)),
-            'template': self.template,
-            'state': self.state,
-            'path': self.path,
-            'added': self.added,
-        }
+        res = {}
+        for k in ScheduledTest.__slots__:
+            if k not in ScheduledTest._unserialized_slots:
+                res[k] = getattr(self, k)
+        return res


 class Runner(threading.Thread):
-    def __init__(self, template, program=None):
+    _next_id = 1
+
+    def __init__(self, template, platform, program=None):
         if program is None:
             program = ['python3', './templates/%s/test' % template]
+        self.id = str(Runner._next_id)
+        Runner._next_id += 1
         self.template = template
+        self.platform = platform
         self.program = program
         self.process = None
         self.job = None
...

@@ -55,71 +79,87 @@ class Runner(threading.Thread):
     def to_dict(self):
         return {
-            'id': str(id(self)),
+            'id': self.id,
             'template': self.template,
             'program': ' '.join(self.program),
-            'job': str(id(self.job)) if self.job is not None else None
+            'job': self.job.id if self.job is not None else None
         }

     def stop(self):
         self.stop_event.set()

+    def lock_new_job(self):
+        my_queue = [s for s in schedule
+                    if s.state == 'SCHEDULED' and s.template == self.template]
+        my_queue.sort(key=lambda s: s.added)
+        if len(my_queue) == 0:
+            # print("Runner %s has no jobs in queue" % self.template)
+            return None
+        job = my_queue[0]
+        with job.lock:
+            # Check if we were the first thread to choose this job
+            if job.state == 'SCHEDULED':
+                job.state = 'RUNNING'
+                self.job = job
+                return job
+            else:
+                # Some other thread is running this test
+                # print("Runner %s could not lock a job" % self.template)
+                return None
+
+    def do_job(self):
+        self.job.time_started = int(time.time())
+        cmd = []
+        cmd += self.program
+        cmd += [self.job.path]
+        print("Executing ``%s´´" % ' '.join(cmd))
+        out_path = os.path.join(self.job.path, 'test.stdout.log')
+        err_path = os.path.join(self.job.path, 'test.stderr.log')
+        with open(out_path, 'w') as out_fd, \
+                open(err_path, 'w') as err_fd:
+            self.process = subprocess.Popen(cmd, stdout=out_fd, stderr=err_fd)
+            while self.process.poll() is None:
+                if self.stop_event.wait(timeout=1):
+                    self.process.send_signal(signal.SIGINT)
+                    try:
+                        self.process.wait(timeout=1)
+                    except subprocess.TimeoutExpired:
+                        pass
+        if self.process.returncode == 0:
+            self.job.state = 'SUCCESSFUL'
+        else:
+            self.job.state = 'FAILED'
+        self.process = None
+        pack_results(self.job, self.platform)

     def run(self):
         while not self.stop_event.is_set():
-            my_queue = [s for s in schedule
-                        if s.state == 'SCHEDULED' and s.template == self.template]
-            my_queue.sort(key=lambda s: s.added)
-            if len(my_queue) == 0:
+            self.lock_new_job()
+            if self.job is None:
                 # No tasks for this thread, go to sleep
                 self.stop_event.wait(timeout=5)
                 continue
-            job = my_queue[0]
-            with job.lock:
-                # Check if we were the first thread to choose this job
-                if job.state == 'SCHEDULED':
-                    job.state = 'RUNNING'
-                    self.job = job
-                else:
-                    # Some other thread is running this test
-                    continue
-            cmd = []
-            cmd += self.program
-            cmd += [self.job.path]
-            print("Executing ``%s´´" % ' '.join(cmd))
-            out_path = os.path.join(self.job.path, 'test.stdout.log')
-            err_path = os.path.join(self.job.path, 'test.stderr.log')
-            with open(out_path, 'w') as out_fd, \
-                    open(err_path, 'w') as err_fd:
-                self.process = subprocess.Popen(cmd, stdout=out_fd, stderr=err_fd)
-                while self.process.poll() is None:
-                    if self.stop_event.wait(timeout=1):
-                        self.process.send_signal(signal.SIGINT)
-                        try:
-                            self.process.wait(timeout=1)
-                        except subprocess.TimeoutExpired:
-                            pass
-            if self.process.returncode == 0:
-                self.job.state = 'SUCCESSFUL'
-            else:
-                self.job.state = 'FAILED'
-            self.process = None
-            subprocess.check_call(['zip', '-r', 'results.zip', '.'], cwd=self.job.path)
-            print("Job %d has finished" % id(self.job))
+            try:
+                self.do_job()
+            except Exception as ex:
+                print(ex)
+            print("Job %s has finished" % self.job.id)
             self.job = None
         print("Thread %s has finished" % self.name)
...

@@ -135,7 +175,7 @@ class Status(Resource):
 class RestartJob(Resource):
     def get(self, job_id):
-        job = [job for job in schedule if str(id(job)) == job_id]
+        job = [job for job in schedule if job.id == job_id]
         job = job[0] if len(job) > 0 else None
         if job is None:
             return 'Job not found', 404
...

@@ -154,12 +194,15 @@ class ScheduleJob(Resource):
             return 'Please send me JSON', 400
         data = request.get_json()
         print(data)
-        if 'path' not in data:
-            return 'path expected', 400
-        if 'template' not in data:
-            return 'template expected', 400
+        mandatory_fields = ['build_dir', 'template', 'template_commit',
+                            'template_timestamp', 'cipher', 'cipher_timestamp']
+        for k in mandatory_fields:
+            if k not in data:
+                return 'field "%s" expected' % k, 400
-        schedule.append(ScheduledTest(data['template'], data['path']))
+        schedule.append(ScheduledTest(**data))
         result = {'success': True}
         return jsonify(result)
...

@@ -177,7 +220,7 @@ def root():
 @app.route('/view_log/<string:job_id>/<int:log_id>')
 def view_log(job_id, log_id):
-    job = [job for job in schedule if str(id(job)) == job_id]
+    job = [job for job in schedule if job.id == job_id]
     job = job[0] if len(job) > 0 else None
     if job is None:
         return 'Job not found', 404
...

@@ -195,20 +238,34 @@ def view_log(job_id, log_id):
 @app.route('/results/<string:job_id>/results.zip')
 def get_results_zip(job_id):
-    job = next(filter(lambda job: str(id(job)) == job_id, schedule), None)
+    job = next(filter(lambda job: job.id == job_id, schedule), None)
     if job is None:
         return 'Job not found', 404
     zip_path = os.path.join(job.path, 'results.zip')
     if not os.path.isfile(zip_path):
         return 'File not found', 404
     with open(zip_path, 'rb') as zip:
         return Response(zip.read(), mimetype='application/zip')

+@app.route('/results/<string:job_id>/results.json')
+def get_results_json(job_id):
+    job = next(filter(lambda job: job.id == job_id, schedule), None)
+    if job is None:
+        return 'Job not found', 404
+    path = os.path.join(job.path, 'results.json')
+    if not os.path.isfile(path):
+        return 'File not found', 404
+    with open(path, 'rb') as zip:
+        return Response(zip.read(), mimetype='application/json')

 if __name__ == '__main__':
-    runners.append(Runner('maixduino'))
-    runners.append(Runner('f7'))
-    runners.append(Runner('uno'))
-    runners.append(Runner('esp32'))
-    runners.append(Runner('bluepill'))
+    runners.append(Runner('maixduino', 'Maixduino'))
+    runners.append(Runner('f7', 'NUCLEO-F746ZG'))
+    runners.append(Runner('uno', 'Arduino Uno'))
+    runners.append(Runner('esp32', 'ESP32'))
+    runners.append(Runner('bluepill', 'BluePill'))

     def signal_handler(signal, frame):
         print("Process interrupted!", file=sys.stderr)
...
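The new Runner.lock_new_job() claims a queued test under the job's own threading.Lock, so that only one runner thread can flip a job from SCHEDULED to RUNNING. A minimal standalone sketch of that claim pattern, with Job standing in for ScheduledTest:

    # Minimal sketch of the job-claiming pattern introduced in Runner.lock_new_job()
    # (Job is a stand-in for ScheduledTest; no Flask or threads are started here).
    import threading

    class Job:
        def __init__(self, name):
            self.name = name
            self.state = 'SCHEDULED'
            self.lock = threading.Lock()

    schedule = [Job('a'), Job('b')]

    def claim_first_scheduled():
        queue = [j for j in schedule if j.state == 'SCHEDULED']
        if not queue:
            return None
        job = queue[0]
        with job.lock:
            # Only the first thread to get here still sees the job as SCHEDULED.
            if job.state == 'SCHEDULED':
                job.state = 'RUNNING'
                return job
        return None

    print(claim_first_scheduled().name)   # 'a'
    print(claim_first_scheduled().name)   # 'b'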
test_common.py

@@ -3,11 +3,14 @@
 import os
 import re
 import sys
+import glob
+import json
 import time
 import fcntl
 import struct
 import socket
 import subprocess
+import numpy as np

 def eprint(*args, **kargs):
...

@@ -186,33 +189,6 @@ def run_nist_aead_test_line(dut, i, m, ad, k, npub, c):
             "expected ciphertext")

-def compare_dumps(dump_a, dump_b):
-    """ Gets the length of the longes streaks of equal bytes in two RAM dumps """
-    streaks = []
-    streak_beg = 0
-    streak_end = 0
-    for i in range(len(dump_a)):
-        if dump_a[i] == dump_b[i]:
-            streak_end = i
-        else:
-            if streak_end != streak_beg:
-                streaks.append((streak_beg, streak_end))
-            streak_beg = i
-            streak_end = i
-    for b, e in streaks:
-        eprint("equal bytes from 0x%x to 0x%x (length: %d)" % (b, e, e - b))
-    b, e = max(streaks, key=lambda a: a[1] - a[0])
-    eprint("longest equal bytes streak from 0x%x to 0x%x (length: %d)" % (b, e, e - b))
-    return e - b

 def parse_nist_aead_test_vectors(test_file_path):
     with open(test_file_path, 'r') as test_file:
         lineprog = re.compile(
...

@@ -229,6 +205,7 @@ def parse_nist_aead_test_vectors(test_file_path):
         res = lineprog.match(line)
         if line == "":
+            yield i, m, ad, k, npub, c
             i = -1
             m = b""
             ad = b""
             k = b""
...

@@ -255,6 +232,9 @@ def parse_nist_aead_test_vectors(test_file_path):
             raise Exception("ERROR: unparsed line in test vectors file: '%s'" % line)
+    if i >= 0:
+        yield i, m, ad, k, npub, c

 class TimeMeasurementTool:
     def begin_measurement(self):
...

@@ -447,10 +427,194 @@ class OpenOcd:
         return data.decode('ascii')

+def pack_results(job, platform):
+    build_dir = job.path
+    subprocess.call(['rm', 'results.zip', 'results.json'], cwd=build_dir)
+
+    logic_path = os.path.join(build_dir, 'logic_trace.csv')
+    logic_trace = []
+    with open(logic_path, 'rt') as f:
+        f.readline()  # skip header
+        for line in f.readlines():
+            parts = line.split(',')
+            t = float(parts[0].strip())
+            v = int(parts[1].strip(), 0)
+            logic_trace.append((t, v))
+    dips = find_dips(logic_trace)
+    dips_durations = [rais - fall for fall, rais in dips]
+
+    ram_dumps = {}
+    for dump_path in glob.glob(os.path.join(build_dir, "ram_dump.*.bin")):
+        dump_name = os.path.basename(dump_path)
+        m = re.match(r"ram_dump.(\d+).bin", dump_name)
+        if not m:
+            raise Exception("RAM dump has an unexpected name %s" % dump_name)
+        idx = int(m[1], 0)
+        with open(dump_path, 'rb') as f:
+            ram_dumps[idx] = f.read()
+    print(list(ram_dumps.keys()))
+    if 0 in ram_dumps and 1 in ram_dumps:
+        total_memory = len(ram_dumps[0])
+        untouched_memory = compare_dumps(ram_dumps[0], ram_dumps[1])
+        print(" longest chunk of untouched memory = %d" % untouched_memory)
+        memory_utilization = total_memory - untouched_memory
+    else:
+        memory_utilization = None
+
+    with open(os.path.join(build_dir, 'firmware_size.txt'), 'rt') as f:
+        firmware_size = int(f.readline(), 0)
+
+    cipher_family, cipher_variant, cipher_impl = tuple(job.cipher.split('.', 2))
+
+    test_vectors_path = os.path.join(build_dir, 'LWC_AEAD_KAT.txt')
+    test_vector = identify_test_vector(test_vectors_path)
+
+    results = {
+        'format_version': '1.0',
+        'test_timestamp': job.time_started,
+        'avg_enc_time': np.mean(dips_durations[0::2]),
+        'avg_dec_time': np.mean(dips_durations[1::2]),
+        'firmware_size': firmware_size,
+        'memory_utilization': memory_utilization,
+        'cipher': {
+            'family': cipher_family,
+            'variant': cipher_variant,
+            'implementation': cipher_impl,
+            'timestamp': job.cipher_timestamp,
+        },
+        'template': {
+            'name': job.template,
+            'timestamp': job.template_timestamp,
+            'commit': job.template_commit,
+        },
+        'platform': {
+            'name': platform,
+        },
+        'test_vector': test_vector,
+        'dips': dips_durations,
+    }
+    json_path = os.path.join(build_dir, 'results.json')
+    with open(json_path, 'wt') as f:
+        json.dump(results, f)
+
+    subprocess.check_call(['zip', '-r', 'results.zip', '.'], cwd=build_dir)

+def find_dips(logic_trace):
+    # There should be an even number of edges (2 edges for each dip)
+    assert 0 != len(logic_trace)
+    assert 0 == len(logic_trace) % 2
+    # First record should be a negative edge, last should be a positive one
+    assert 0 == logic_trace[0][1]
+    assert 0 != logic_trace[-1][1]
+    # Record the start and end times of every dip
+    dips = []
+    for i in range(0, len(logic_trace), 2):
+        assert 0 == logic_trace[i][1]
+        assert 0 != logic_trace[i + 1][1]
+        dips.append((logic_trace[i][0], logic_trace[i + 1][0]))
+    # Debounce dips by assuming that a data transfer
+    # between two dips takes at least 1 microsecond
+    THERESHOLD = 1e-6
+    debounced = []
+    i = 0
+    while i < len(dips) - 1:
+        fall, rais = dips[i]
+        next_fall, next_rais = dips[i + 1]
+        xfer_time = next_fall - rais
+        if xfer_time < THERESHOLD:
+            # Merge current dip with the next
+            dips[i] = (fall, next_rais)
+            dips.pop(i + 1)
+        else:
+            # Save current dip
+            debounced.append((fall, rais))
+            i += 1
+    # Add the last dip
+    debounced.append((dips[i][0], dips[i][1]))
+    dips = debounced
+    # There should be an even number of dips (encryption and decryption)
+    assert 0 == len(dips) % 2
+    return dips

+def compare_dumps(dump_a, dump_b):
+    """ Gets the length of the longes streaks of equal bytes in two RAM dumps """
+    streaks = []
+    streak_beg = 0
+    streak_end = 0
+    for i in range(len(dump_a)):
+        if dump_a[i] == dump_b[i]:
+            streak_end = i
+        else:
+            if streak_end != streak_beg:
+                streaks.append((streak_beg, streak_end))
+            streak_beg = i
+            streak_end = i
+    for b, e in streaks:
+        print("equal bytes from 0x%x to 0x%x (length: %d)" % (b, e, e - b))
+    b, e = max(streaks, key=lambda a: a[1] - a[0])
+    print("longest equal bytes streak from 0x%x to 0x%x (length: %d)" % (b, e, e - b))
+    return e - b

+def identify_test_vector(kat_path):
+    # Check if a provided test vector is the official NIST LWC or not
+    kat = list(parse_nist_aead_test_vectors(kat_path))
+
+    def is_nist_aead_kat(kat):
+        if len(kat) != 1089:
+            return False
+
+        def genstr(length):
+            return bytes([b % 256 for b in range(length)])
+
+        expected_k = genstr(len(kat[0][3]))
+        expected_npub = genstr(len(kat[0][4]))
+        expected_i = 0
+        for i, m, ad, k, npub, c in kat:
+            expected_m = genstr((i - 1) // 33)
+            expected_ad = genstr((i - 1) % 33)
+            expected_i += 1
+            if not (expected_i == i
+                    and expected_m == m
+                    and expected_k == k
+                    and expected_ad == ad
+                    and expected_npub == npub):
+                return False
+        return True
+
+    if is_nist_aead_kat(kat):
+        return "NIST AEAD KAT"
+    return None

 def run_nist_lws_aead_test(dut, vectors_file, build_dir, logic_mask=0xffff):
     kat = list(parse_nist_aead_test_vectors(vectors_file))
+    firmware_size = dut.firmware_size()
+    path = os.path.join(build_dir, 'firmware_size.txt')
+    with open(path, 'wt') as f:
+        print(firmware_size, file=f)
     dut.flash()
     dut.prepare()
     sys.stdout.write("Board prepared\n")
...

@@ -470,9 +634,6 @@ def run_nist_lws_aead_test(dut, vectors_file, build_dir,
             if i == 1 and ram_dumps[0] is not None:
                 ram_dumps.append(dut.dump_ram())
-                longest = compare_dumps(ram_dumps[0], ram_dumps[1])
-                print(" longest chunk of untouched memory = %d" % longest)
     except Exception as ex:
         print("TEST FAILED")
         raise ex
...
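pack_results() derives timing from the logic-analyzer trace: falling/rising edge pairs become "dips", dips separated by less than 1 µs are merged, and the even- and odd-indexed dip durations are averaged into encryption and decryption times. A toy illustration of the final averaging step, with made-up dip values:

    # Toy illustration of how pack_results turns dips into the reported averages
    # (assumption: the (fall, rise) pairs below are invented numbers in seconds).
    import numpy as np

    # even index = encryption, odd index = decryption, as in pack_results
    dips = [(0.000, 0.012), (0.020, 0.029), (0.040, 0.053), (0.060, 0.068)]
    durations = [rise - fall for fall, rise in dips]

    avg_enc_time = np.mean(durations[0::2])
    avg_dec_time = np.mean(durations[1::2])
    print(avg_enc_time, avg_dec_time)   # 0.0125 0.0085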