Merge pull request #2 from Rouast-Labs/development
Support new version of the API
prouast authored Jul 6, 2024
2 parents 998b20c + aa433a4 commit fdfbc70
Showing 14 changed files with 277 additions and 138 deletions.
7 changes: 5 additions & 2 deletions .github/workflows/main.yml
@@ -2,9 +2,9 @@ name: Tests

on:
push:
branches: ["main"]
branches: ["main", "development"]
pull_request:
branches: ["main"]
branches: ["main", "development"]

jobs:
build:
@@ -32,6 +32,9 @@ jobs:
run: |
python -m pip install --upgrade pip
python -m pip install ".[test]"
- name: Set API_URL for development branch
if: github.ref == 'refs/heads/development'
run: echo "API_URL=https://api.rouast.com/vitallens-dev" >> $GITHUB_ENV
- name: Lint with flake8
run: |
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
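
The new step above exports `API_URL` only when the workflow runs on the `development` branch. A minimal sketch of how test code might resolve the endpoint under that setup, assuming the client falls back to a production URL when the variable is unset (the default URL here is an assumption):

```
import os

# Assumption: the production endpoint is used unless the workflow overrides API_URL.
API_URL = os.getenv("API_URL", "https://api.rouast.com/vitallens")
```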
48 changes: 31 additions & 17 deletions README.md
@@ -77,30 +77,44 @@ The estimation results are returned as a `list`. It contains a `dict` for each d
```
[
{
'face': <face coords for each frame as np.ndarray of shape (n_frames, 4)>,
'pulse': {
'val': <estimated pulse waveform val for each frame as np.ndarray of shape (n_frames,)>,
'conf': <estimation confidence for each frame as np.ndarray of shape (n_frames,)>,
'face': {
'coordinates': <Face coordinates for each frame as np.ndarray of shape (n_frames, 4)>,
'confidence': <Face live confidence for each frame as np.ndarray of shape (n_frames,)>,
'note': <Explanatory note>
},
'resp': {
'val': <estimated respiration waveform val for each frame as np.ndarray of shape (n_frames,)>,
'conf': <estimation confidence for each frame as np.ndarray of shape (n_frames,)>,
'vital_signs': {
'heart_rate': {
'value': <Estimated value as float scalar>,
'unit': <Value unit>,
'confidence': <Estimation confidence as float scalar>,
'note': <Explanatory note>
},
'respiratory_rate': {
'value': <Estimated value as float scalar>,
'unit': <Value unit>,
'confidence': <Estimation confidence as float scalar>,
'note': <Explanatory note>
},
'ppg_waveform': {
'data': <Estimated waveform value for each frame as np.ndarray of shape (n_frames,)>,
'unit': <Data unit>,
'confidence': <Estimation confidence for each frame as np.ndarray of shape (n_frames,)>,
'note': <Explanatory note>
},
'respiratory_waveform': {
'data': <Estimated waveform value for each frame as np.ndarray of shape (n_frames,)>,
'unit': <Data unit>,
'confidence': <Estimation confidence for each frame as np.ndarray of shape (n_frames,)>,
'note': <Explanatory note>
},
},
'hr': {
'val': <estimated heart rate as float scalar>,
'conf': <estimation confidence as float scalar>,
},
'rr': {
'val': <estimated respiratory rate as float scalar>,
'conf': <estimation confidence as float scalar>,
},
'live': <liveness estimation for each frame as np.ndarray of shape (n_frames,)>,
"message": <Message about estimates>
},
{
<same structure for face 2 if present>
},
...
]
]
```
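
A minimal sketch of reading these fields for the first detected face; `result` is the list described above, and the printed formatting is illustrative only:

```
first_face = result[0]

# Per-frame face detection output
coords = first_face['face']['coordinates']        # np.ndarray, shape (n_frames, 4)
face_conf = first_face['face']['confidence']      # np.ndarray, shape (n_frames,)

# Scalar vitals and per-frame waveforms
vitals = first_face['vital_signs']
hr = vitals['heart_rate']['value']                # float, in vitals['heart_rate']['unit']
ppg = vitals['ppg_waveform']['data']              # np.ndarray, shape (n_frames,)
resp = vitals['respiratory_waveform']['data']     # np.ndarray, shape (n_frames,)

print("HR: {:.1f} {} ({:.0f}% confidence)".format(
    hr, vitals['heart_rate']['unit'], vitals['heart_rate']['confidence'] * 100))
```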

### Example: Use VitalLens API to estimate vitals from a video file
29 changes: 16 additions & 13 deletions examples/test.py
@@ -40,25 +40,28 @@ def run(args=None):
time_ms = (stop-start)*1000
print("Inference time: {:.2f} ms".format(time_ms))
# Plot the results
if 'resp' in result[0]:
vital_signs = result[0]['vital_signs']
if "respiratory_waveform" in vital_signs:
fig, (ax1, ax2) = plt.subplots(2, sharex=True, figsize=(12, 6))
else:
fig, ax1 = plt.subplots(1, figsize=(12, 6))
fig.suptitle('Vital signs estimated from {} using {} in {:.2f} ms'.format(args.video_path, args.method.name, time_ms))
if "pulse" in result[0] and ppg_gt is not None:
if "ppg_waveform" in vital_signs and ppg_gt is not None:
hr_gt = estimate_freq(ppg_gt, f_s=fps, f_res=0.005, f_range=(40./60., 240./60.), method='periodogram') * 60.
ax1.plot(ppg_gt, color=COLOR_GT, label='Pulse Ground Truth -> HR: {:.1f} bpm'.format(hr_gt))
if "resp" in result[0] and resp_gt is not None:
rr_label = estimate_freq(resp_gt, f_s=fps, f_res=0.005, f_range=(4./60., 90./60.), method='periodogram') * 60.
ax2.plot(resp_gt, color=COLOR_GT, label='Resp Ground Truth -> RR: {:.1f} bpm'.format(rr_label))
if "pulse" in result[0]:
ax1.plot(result[0]['pulse']['val'], color=METHOD_COLORS[args.method], label='Pulse Estimate -> HR: {:.1f} bpm ({:.1f}% confidence)'.format(result[0]['hr']['val'], result[0]['hr']['conf']*100))
ax1.plot(result[0]['pulse']['conf'], color=METHOD_COLORS[args.method], label='Pulse Estimation Confidence')
if "resp" in result[0]:
ax2.plot(result[0]['resp']['val'], color=METHOD_COLORS[args.method], label='Resp Estimate -> RR: {:.1f} bpm ({:.1f}% confidence)'.format(result[0]['rr']['val'], result[0]['rr']['conf']*100))
ax2.plot(result[0]['resp']['conf'], color=METHOD_COLORS[args.method], label='Resp Estimation Confidence')
ax1.plot(ppg_gt, color=COLOR_GT, label='PPG Waveform Ground Truth -> HR: {:.1f} bpm'.format(hr_gt))
if "respiratory_waveform" in vital_signs and resp_gt is not None:
rr_gt = estimate_freq(resp_gt, f_s=fps, f_res=0.005, f_range=(4./60., 90./60.), method='periodogram') * 60.
ax2.plot(resp_gt, color=COLOR_GT, label='Respiratory Waveform Ground Truth -> RR: {:.1f} bpm'.format(rr_gt))
if "ppg_waveform" in vital_signs:
ax1.plot(vital_signs['ppg_waveform']['data'], color=METHOD_COLORS[args.method], label='PPG Waveform Estimate -> HR: {:.1f} bpm ({:.0f}% confidence)'.format(
vital_signs['heart_rate']['value'], vital_signs['heart_rate']['confidence']*100))
ax1.plot(vital_signs['ppg_waveform']['confidence'], color=METHOD_COLORS[args.method], label='PPG Waveform Estimation Confidence')
if "respiratory_waveform" in vital_signs:
ax2.plot(vital_signs['respiratory_waveform']['data'], color=METHOD_COLORS[args.method], label='Respiratory Waveform Estimate -> RR: {:.1f} bpm ({:.0f}% confidence)'.format(
vital_signs['respiratory_rate']['value'], vital_signs['respiratory_rate']['confidence']*100))
ax2.plot(vital_signs['respiratory_waveform']['confidence'], color=METHOD_COLORS[args.method], label='Respiratory Waveform Estimation Confidence')
ax1.legend()
if 'resp' in result[0]: ax2.legend()
if 'respiratory_waveform' in vital_signs: ax2.legend()
plt.show()

def method_type(name):
25 changes: 15 additions & 10 deletions tests/test_client.py
@@ -39,9 +39,12 @@ def test_VitalLens(request, method, detect_faces, file):
test_video_fps = request.getfixturevalue('test_video_fps')
result = vl(test_video_ndarray, fps=test_video_fps, faces = None if detect_faces else [247, 57, 440, 334])
assert len(result) == 1
assert result[0]['face'].shape == (360, 4)
assert result[0]['pulse']['val'].shape == (360,)
np.testing.assert_allclose(result[0]['hr']['val'], 60, atol=10)
assert result[0]['face']['coordinates'].shape == (360, 4)
assert result[0]['face']['confidence'].shape == (360,)
assert result[0]['vital_signs']['ppg_waveform']['data'].shape == (360,)
assert result[0]['vital_signs']['ppg_waveform']['confidence'].shape == (360,)
np.testing.assert_allclose(result[0]['vital_signs']['heart_rate']['value'], 60, atol=10)
assert result[0]['vital_signs']['heart_rate']['confidence'] == 1.0

def test_VitalLens_API(request):
api_key = request.getfixturevalue('test_dev_api_key')
@@ -50,10 +53,12 @@ def test_VitalLens_API(request):
test_video_fps = request.getfixturevalue('test_video_fps')
result = vl(test_video_ndarray, fps=test_video_fps, faces=None)
assert len(result) == 1
assert result[0]['face'].shape == (360, 4)
assert result[0]['pulse']['val'].shape == (360,)
assert result[0]['pulse']['conf'].shape == (360,)
assert result[0]['resp']['val'].shape == (360,)
assert result[0]['resp']['conf'].shape == (360,)
np.testing.assert_allclose(result[0]['hr']['val'], 60, atol=0.5)
np.testing.assert_allclose(result[0]['rr']['val'], 13.5, atol=0.5)
assert result[0]['face']['coordinates'].shape == (360, 4)
assert result[0]['vital_signs']['ppg_waveform']['data'].shape == (360,)
assert result[0]['vital_signs']['ppg_waveform']['confidence'].shape == (360,)
assert result[0]['vital_signs']['respiratory_waveform']['data'].shape == (360,)
assert result[0]['vital_signs']['respiratory_waveform']['confidence'].shape == (360,)
np.testing.assert_allclose(result[0]['vital_signs']['heart_rate']['value'], 60, atol=0.5)
np.testing.assert_allclose(result[0]['vital_signs']['heart_rate']['confidence'], 1.0, atol=0.1)
np.testing.assert_allclose(result[0]['vital_signs']['respiratory_rate']['value'], 13.5, atol=0.5)
np.testing.assert_allclose(result[0]['vital_signs']['respiratory_rate']['confidence'], 1.0, atol=0.1)
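
For reference, the end-to-end call these assertions exercise might look like the sketch below; the import path and `Method` enum value are assumptions, while the call signature mirrors the fixtures above:

```
import numpy as np
from vitallens import VitalLens, Method  # import path assumed

vl = VitalLens(method=Method.VITALLENS, api_key="YOUR_API_KEY")
video = np.zeros((360, 480, 640, 3), dtype=np.uint8)  # placeholder frames
result = vl(video, fps=30.0, faces=None)               # faces=None -> run face detection

hr = result[0]['vital_signs']['heart_rate']['value']
```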
36 changes: 27 additions & 9 deletions tests/test_simple_rppg_method.py
@@ -40,11 +40,17 @@ def test_CHROMRPPGMethod(request, override_fps_target):
test_video_ndarray = request.getfixturevalue('test_video_ndarray')
test_video_fps = request.getfixturevalue('test_video_fps')
test_video_faces = request.getfixturevalue('test_video_faces')
sig, conf, live = method(
data, unit, conf, note, live = method(
frames=test_video_ndarray, faces=test_video_faces,
fps=test_video_fps, override_fps_target=override_fps_target)
assert sig.shape == (1, test_video_ndarray.shape[0])
np.testing.assert_equal(conf, np.ones((1, test_video_ndarray.shape[0]), np.float32))
assert all(key in data for key in method.signals)
assert all(key in unit for key in method.signals)
assert all(key in conf for key in method.signals)
assert all(key in note for key in method.signals)
assert data['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
assert conf['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
np.testing.assert_equal(conf['ppg_waveform'], np.ones((test_video_ndarray.shape[0],), np.float32))
assert conf['heart_rate'] == 1.0
np.testing.assert_equal(live, np.ones((test_video_ndarray.shape[0],), np.float32))

@pytest.mark.parametrize("override_fps_target", [None, 15])
@@ -58,11 +64,17 @@ def test_GRPPGMethod(request, override_fps_target):
test_video_ndarray = request.getfixturevalue('test_video_ndarray')
test_video_fps = request.getfixturevalue('test_video_fps')
test_video_faces = request.getfixturevalue('test_video_faces')
sig, conf, live = method(
data, unit, conf, note, live = method(
frames=test_video_ndarray, faces=test_video_faces,
fps=test_video_fps, override_fps_target=override_fps_target)
assert sig.shape == (1, test_video_ndarray.shape[0])
np.testing.assert_equal(conf, np.ones((1, test_video_ndarray.shape[0]), np.float32))
assert all(key in data for key in method.signals)
assert all(key in unit for key in method.signals)
assert all(key in conf for key in method.signals)
assert all(key in note for key in method.signals)
assert data['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
assert conf['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
np.testing.assert_equal(conf['ppg_waveform'], np.ones((test_video_ndarray.shape[0],), np.float32))
assert conf['heart_rate'] == 1.0
np.testing.assert_equal(live, np.ones((test_video_ndarray.shape[0],), np.float32))

@pytest.mark.parametrize("override_fps_target", [None, 15])
@@ -76,9 +88,15 @@ def test_POSRPPGMethod(request, override_fps_target):
test_video_ndarray = request.getfixturevalue('test_video_ndarray')
test_video_fps = request.getfixturevalue('test_video_fps')
test_video_faces = request.getfixturevalue('test_video_faces')
sig, conf, live = method(
data, unit, conf, note, live = method(
frames=test_video_ndarray, faces=test_video_faces,
fps=test_video_fps, override_fps_target=override_fps_target)
assert sig.shape == (1, test_video_ndarray.shape[0])
np.testing.assert_equal(conf, np.ones((1, test_video_ndarray.shape[0]), np.float32))
assert all(key in data for key in method.signals)
assert all(key in unit for key in method.signals)
assert all(key in conf for key in method.signals)
assert all(key in note for key in method.signals)
assert data['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
assert conf['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
np.testing.assert_equal(conf['ppg_waveform'], np.ones((test_video_ndarray.shape[0],), np.float32))
assert conf['heart_rate'] == 1.0
np.testing.assert_equal(live, np.ones((test_video_ndarray.shape[0],), np.float32))
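
The asserts above imply the simple rPPG methods now return per-signal dictionaries keyed by `method.signals`, plus a per-frame liveness array, instead of stacked `(n_signals, n_frames)` arrays. A hedged sketch of the shapes involved (all values are illustrative):

```
import numpy as np

n_frames = 360  # illustrative

data = {'heart_rate': 72.0,
        'ppg_waveform': np.zeros((n_frames,), np.float32)}
unit = {'heart_rate': 'bpm',
        'ppg_waveform': 'unitless'}
conf = {'heart_rate': 1.0,
        'ppg_waveform': np.ones((n_frames,), np.float32)}
note = {'heart_rate': 'Estimated from the PPG waveform',
        'ppg_waveform': 'Estimated from facial color changes'}
live = np.ones((n_frames,), np.float32)  # per-frame liveness
```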
59 changes: 41 additions & 18 deletions tests/test_vitallens.py
@@ -67,24 +67,29 @@ def create_mock_api_response(
api_key = headers["x-api-key"]
if api_key is None or not isinstance(api_key, str) or len(api_key) < 30:
return create_mock_response(
status_code=403, json_data={"signal": None, "conf": None, "live": None, "message": "Error"})
status_code=403, json_data={"vital_signs": None, "face": None, "state": None, "message": "Error"})
video_base64 = json["video"]
if video_base64 is None:
return create_mock_response(
status_code=400, json_data={"signal": None, "conf": None, "live": None, "message": "Error"})
status_code=400, json_data={"vital_signs": None, "face": None, "state": None, "message": "Error"})
try:
video = np.frombuffer(base64.b64decode(video_base64), dtype=np.uint8)
video = video.reshape((-1, 40, 40, 3))
except Exception as e:
return create_mock_response(status_code=400, json_data={"signal": None, "conf": None, "live": None, "message": "Error: {}".format(e)})
return create_mock_response(status_code=400, json_data={"vital_signs": None, "face": None, "state": None, "message": "Error: {}".format(e)})
if video.shape[0] < API_MIN_FRAMES or video.shape[0] > API_MAX_FRAMES:
return create_mock_response(status_code=400, json_data={"signal": None, "conf": None, "live": None, "message": "Error"})
return create_mock_response(status_code=400, json_data={"vital_signs": None, "face": None, "state": None, "message": "Error"})
return create_mock_response(
status_code=200,
json_data={"signal": np.random.rand(2, video.shape[0]).tolist(),
"conf": np.random.rand(2, video.shape[0]).tolist(),
"live": np.random.rand(video.shape[0]).tolist(),
"message": ""})
json_data={
"vital_signs": {
"heart_rate": {"value": 60.0, "unit": "bpm", "confidence": 0.99, "note": "Note"},
"respiratory_rate": {"value": 15.0, "unit": "bpm", "confidence": 0.97, "note": "Note"},
"ppg_waveform": {"data": np.random.rand(video.shape[0]).tolist(), "unit": "unitless", "confidence": np.ones(video.shape[0]).tolist(), "note": "Note"},
"respiratory_waveform": {"data": np.random.rand(video.shape[0]).tolist(), "unit": "unitless", "confidence": np.ones(video.shape[0]).tolist(), "note": "Note"}},
"face": {"confidence": np.random.rand(video.shape[0]).tolist(), "note": "Note"},
"state": {"data": np.zeros((2, 128), dtype=np.float32).tolist(), "note": "Note"},
"message": "Message"})

@pytest.mark.parametrize("file", [True, False])
@pytest.mark.parametrize("override_fps_target", [None, 15, 10])
@@ -101,22 +106,29 @@ def test_VitalLensRPPGMethod_mock(mock_post, request, file, override_fps_target,
test_video_faces = request.getfixturevalue('test_video_faces')
method = VitalLensRPPGMethod(config, api_key=api_key)
if file:
sig, conf, live = method(
data, unit, conf, note, live = method(
frames=test_video_path, faces=test_video_faces,
override_fps_target=override_fps_target)
else:
if long:
n_repeats = (API_MAX_FRAMES * 3) // test_video_ndarray.shape[0] + 1
test_video_ndarray = np.repeat(test_video_ndarray, repeats=n_repeats, axis=0)
test_video_faces = np.repeat(test_video_faces, repeats=n_repeats, axis=0)
sig, conf, live = method(
data, unit, conf, note, live = method(
frames=test_video_ndarray, faces=test_video_faces,
fps=test_video_fps, override_fps_target=override_fps_target)
assert sig.shape == (2, test_video_ndarray.shape[0])
assert conf.shape == (2, test_video_ndarray.shape[0])
assert all(key in data for key in method.signals)
assert all(key in unit for key in method.signals)
assert all(key in conf for key in method.signals)
assert all(key in note for key in method.signals)
assert data['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
assert conf['ppg_waveform'].shape == (test_video_ndarray.shape[0],)
assert data['respiratory_waveform'].shape == (test_video_ndarray.shape[0],)
assert conf['respiratory_waveform'].shape == (test_video_ndarray.shape[0],)
assert live.shape == (test_video_ndarray.shape[0],)

def test_VitalLens_API_valid_response(request):
@pytest.mark.parametrize("process_signals", [True, False])
def test_VitalLens_API_valid_response(request, process_signals):
config = load_config("vitallens.yaml")
api_key = request.getfixturevalue('test_dev_api_key')
test_video_ndarray = request.getfixturevalue('test_video_ndarray')
@@ -127,15 +139,26 @@ def test_VitalLens_API_valid_response(request):
roi=test_video_faces[0].tolist(), library='prpy', scale_algorithm='bilinear')
headers = {"x-api-key": api_key}
payload = {"video": base64.b64encode(frames[:16].tobytes()).decode('utf-8')}
if process_signals: payload['fps'] = str(30)
response = requests.post(API_URL, headers=headers, json=payload)
response_body = json.loads(response.text)
assert response.status_code == 200
sig = np.asarray(response_body["signal"])
conf = np.asarray(response_body["conf"])
live = np.asarray(response_body["live"])
assert sig.shape == (2, 16)
assert conf.shape == (2, 16)
assert all(key in response_body for key in ["face", "vital_signs", "state", "message"])
vital_signs = response_body["vital_signs"]
assert all(key in vital_signs for key in ["ppg_waveform", "respiratory_waveform"])
ppg_waveform_data = np.asarray(response_body["vital_signs"]["ppg_waveform"]["data"])
ppg_waveform_conf = np.asarray(response_body["vital_signs"]["ppg_waveform"]["confidence"])
resp_waveform_data = np.asarray(response_body["vital_signs"]["respiratory_waveform"]["data"])
resp_waveform_conf = np.asarray(response_body["vital_signs"]["respiratory_waveform"]["confidence"])
assert ppg_waveform_data.shape == (16,)
assert ppg_waveform_conf.shape == (16,)
assert resp_waveform_data.shape == (16,)
assert resp_waveform_conf.shape == (16,)
assert all((key in vital_signs) if process_signals else (key not in vital_signs) for key in ["heart_rate", "respiratory_rate"])
live = np.asarray(response_body["face"]["confidence"])
assert live.shape == (16,)
state = np.asarray(response_body["state"]["data"])
assert state.shape == (2, 128)

def test_VitalLens_API_wrong_api_key(request):
config = load_config("vitallens.yaml")
