add tools which could help with pre-processing of the data collected over USB serial
lucasguocn committed Oct 24, 2022
1 parent 60e3bcb commit 5ed938a
Showing 4 changed files with 269 additions and 0 deletions.
31 changes: 31 additions & 0 deletions Arduino_BHY2/examples/SyncAndCollectIMUData/tools/README.md
@@ -0,0 +1,31 @@
# process_nicla_bhy2_log_base64.py
This script converts a base64-encoded log to ASCII (comma-separated records).
## Usage
```
./process_nicla_bhy2_log_base64.py [log_file_name]
```
## Example
```
./process_nicla_bhy2_log_base64.py log_nicla_bhy2.txt
```
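
For reference, each log line is a base64-encoded binary record. The sketch below is not part of this commit; it round-trips one made-up record through the same `struct` layout the script uses for accel+gyro+meta data (`<cBhhhhhh`: a 1-character log id, a 1-byte sequence counter and six int16 samples), so you can see what a single log line looks like before and after conversion.
```
#!/usr/bin/env python
# Illustration only: build one accel+gyro+meta record, encode it the way a log
# line is expected to look, then decode it back (the sample values are made up).
import base64
import struct

record = struct.pack("<cBhhhhhh", b'A', 3, 100, -200, 300, 10, -20, 30)
line = base64.b64encode(record).decode('ascii')
print(line)   # one base64-encoded log line (20 characters, '=' padded)

fields = struct.unpack("<cBhhhhhh", base64.b64decode(line))
log_id, seq, ax, ay, az, gx, gy, gz = fields
print(log_id.decode('ascii'), seq, ax, ay, az, gx, gy, gz)
```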


# check_for_data_loss.sh
This script checks for potential data loss during the transfer and reports an error for every gap it finds.

## Usage
```
./check_for_data_loss.sh [OPTION] [log_file_name]
```

## Example
- Example 1
```
./check_for_data_loss.sh -b ./minicom.log
```
The above command checks for data loss using the log "./minicom.log", which is base64-encoded.
- Example 2
```
./check_for_data_loss.sh -a ./minicom.log
```
The above command checks for data loss using the log "./minicom.log", which is ASCII-encoded.
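
Under the hood this wrapper converts the log to CSV and then runs `check_for_data_loss.py`, which flags every record whose sequence number does not follow its predecessor. Below is a simplified, self-contained sketch of that check (not part of this commit); the sequence counter is treated as wrapping around modulo 10, as in the checker, and the sample values are made up.
```
#!/usr/bin/env python
# Simplified sketch of the sequence check done by check_for_data_loss.py
# (made-up data: the record with seq 2 is missing).
seqs = [7, 8, 9, 0, 1, 3]

seq_last = None
for i, seq in enumerate(seqs):
    if seq_last is not None:
        delta = seq - seq_last
        if seq < seq_last:       # the counter wrapped around (9 -> 0)
            delta += 10
        if delta != 1:           # anything other than +1 means records were lost
            print("data missing at record", i, "delta:", delta, "seq:", seq)
    seq_last = seq
```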
73 changes: 73 additions & 0 deletions Arduino_BHY2/examples/SyncAndCollectIMUData/tools/check_for_data_loss.py
@@ -0,0 +1,73 @@
#!/usr/bin/env python
# this script checks the 'seq' column of a CSV log for gaps (missing records)

import sys
import time
import datetime
import pandas as pd
import math


#DEBUG = True
DEBUG = False
SEP_COLUMN = ','

LINE_TO_SKIP = 500       # number of leading CSV rows to ignore before checking

DOWNSAMPLE_FACTOR = 1    # currently unused

def PDBG(*args):
    if DEBUG:
        print("DEBUG:", *args, file=sys.stderr)


PDBG('Number of arguments:', len(sys.argv))

if (len(sys.argv) < 2):
    sys.exit("usage: check_for_data_loss.py <input_csv> [output_file]")

filename_in = sys.argv[1]
file_out = None

if (len(sys.argv) > 2):
    # optional second argument: redirect the report to this file
    filename_out = sys.argv[2]
    file_out = open(filename_out, 'w')
    sys.stdout = file_out


line_cnt = 1 #first row is the header
df_in = pd.read_csv(filename_in, sep=SEP_COLUMN)

seq_last = None


for index, row in df_in.iterrows():
    line_cnt += 1
    if (line_cnt <= LINE_TO_SKIP):
        continue

    seq = row['seq']
    if (seq_last is not None):
        try:
            delta = seq - seq_last
            if (seq < seq_last):
                # a smaller value means the sequence counter wrapped around (modulo 10)
                delta += 10
            if (delta != 1):
                if (line_cnt < len(df_in.index)):
                    print("error: line: ", line_cnt, " has data missing, delta:", delta, "seq:", seq)
                else:
                    PDBG("last line: ignored")
            seq_last = seq
        except Exception:
            # a malformed row (e.g. a non-numeric 'seq') is also reported as missing data
            if (line_cnt < len(df_in.index)):
                print("error: line: ", line_cnt, " has data missing")
            else:
                PDBG("last line: ignored")
    else:
        PDBG("first row")
        seq_last = seq


if file_out is not None:
    file_out.close()
63 changes: 63 additions & 0 deletions Arduino_BHY2/examples/SyncAndCollectIMUData/tools/check_for_data_loss.sh
@@ -0,0 +1,63 @@
#!/bin/sh


log_encoding_is_base64=false

if [ ":""$1" = ":-b" ] ; then
    log_encoding_is_base64=true
    echo "log is using base64 encoding"
elif [ ":""$1" = ":-a" ] ; then
    log_encoding_is_base64=false
    echo "log is using ascii encoding"
else
    echo "usage: ./check_for_data_loss.sh [OPTION] [log_filename] [info_included_in_data]"
    echo "\t[OPTION]:"
    echo "\t\t" "-a"
    echo "\t\t" "\t:log uses ascii encoding"
    echo "\t\t" "-b"
    echo "\t\t" "\t:log uses base64 encoding"
    echo "\t[info_included_in_data]:"
    echo "\t\t" '"accel" or "accel+meta"'
    echo "\t\t" '"gyro" or "gyro+meta"'
    echo "\t\t" '"accel+gyro" or "accel+gyro+meta"'

    echo "\texample: ./check_for_data_loss.sh -b minicom.cap accel+gyro+meta"
    exit 1
fi

if [ ":""$2" = ":" ] ; then
    log_file="./minicom.cap"
else
    log_file="$2"
fi

if [ ":""$3" = ":" ] ; then
    info_included_in_data="accel+gyro+meta"
else
    info_included_in_data="$3"
fi


tmp_file="./tmp.csv"
log_file_cp="${log_file}.cp"
log_file_in="${log_file_cp}.tmp"

echo "log_file:$log_file"


if [ $log_encoding_is_base64 = true ] ; then
    cp "$log_file" "$log_file_cp"
    ./process_nicla_bhy2_log_base64.py "$log_file_cp" "$info_included_in_data" > "$log_file_in"
else
    cp "$log_file" "$log_file_in"
fi

echo "log_id,seq,ax,ay,az,gx,gy,gz" > "$tmp_file"

# reformat each record as CSV: drop anything before the log-id letter and
# separate it from the sequence digit with a comma
sed 's/.*\([a-zA-Z]\)\([0-9]\)/\1,\2/g' "$log_file_in" >> "$tmp_file"
./check_for_data_loss.py "$tmp_file"

rm -f "$tmp_file"
rm -f "$log_file_cp"
rm -f "$log_file_in"
102 changes: 102 additions & 0 deletions Arduino_BHY2/examples/SyncAndCollectIMUData/tools/process_nicla_bhy2_log_base64.py
@@ -0,0 +1,102 @@
#!/usr/bin/env python
import math
import base64
import struct
import sys

#configurations
#DEBUG = True
DEBUG = False
lines_to_ignore = 500   # number of leading log lines to ignore

# the following should be set according to the firmware settings on the Nicla Sense ME firmware/sketch
data_include_acc = False
data_include_gyr = False
data_include_meta_info = False


if (len(sys.argv) > 1):
    log_file_name = sys.argv[1]
else:
    log_file_name = 'log_nicla_bhy2.txt'

if (len(sys.argv) > 2):
    info_included_in_data = sys.argv[2]
    if "accel" in info_included_in_data:
        data_include_acc = True
        if DEBUG:
            print("accel data included")
    if "gyro" in info_included_in_data:
        data_include_gyr = True
        if DEBUG:
            print("gyro data included")
    if "meta" in info_included_in_data:
        data_include_meta_info = True
        if DEBUG:
            print("meta data included")

#working variables
file_log = open(log_file_name, 'r')
lines = file_log.readlines()

lineCnt = 0
record_len = 0
record_len_before_encoding = 0

if data_include_meta_info:
    record_len_before_encoding += 2

if data_include_acc:
    record_len_before_encoding += 6

if data_include_gyr:
    record_len_before_encoding += 6


# base64 encodes every 3 input bytes as 4 output characters,
# so an encoded record is ceil(record_len_before_encoding / 3) * 4 characters long
record_len = int(math.ceil(record_len_before_encoding / 3)) * 4
if DEBUG:
    print("record_len:", record_len)


DEBUG = False
# Strips the newline character
for line in lines:
    lineCnt += 1

    if (lineCnt <= lines_to_ignore):
        continue

    line = line.strip()
    len_line = len(line)

    # skip records that are too short or do not end with the base64 padding '='
    if (len_line < record_len) or (line[len_line - 1] != '='):
        if (lineCnt < len(lines)):
            print("line:", lineCnt, " skipped")
        else:
            if DEBUG:
                print("last line:", lineCnt, " skipped")
        continue

    # keep only the last record_len characters of the line
    line = line[-record_len:]
    try:
        data_imu = base64.b64decode(line)
        if DEBUG:
            print(data_imu)
            print(len(data_imu))

        try:
            if (data_include_acc and data_include_gyr):
                (log_id, seq, ax, ay, az, gx, gy, gz) = struct.unpack("<cBhhhhhh", data_imu)
                log_id = log_id.decode('ascii')
                print(log_id.strip(), ",", seq, ",", ax, ",", ay, ",", az, ",", gx, ",", gy, ",", gz, sep='')
            else:
                (log_id, seq, x, y, z) = struct.unpack("<cBhhh", data_imu)
                log_id = log_id.decode('ascii')
                print(log_id, ",", seq, ",", x, ",", y, ",", z, sep='')
        except struct.error:
            print("error: line:", lineCnt, " unpack")

    except base64.binascii.Error:
        print("error: line:", lineCnt, " incomplete data")
