Compare commits

...

1 Commits

Author SHA1 Message Date
Alie Langston
139689ccf7 added object tracking
moved load of library

updated test

removed async

trying to retest

Retesting

added test back

fixed errors due to next button

removed try catch so errors occur

added try catch back

added in ignore

readded libraries

stops detection when photo is taken, stops erroring issue

added help text

added spinner and better mocked blazeface

moved img element back to correct place

updated for requested changes

added coco-ssd object tracking

updated ranges

updated messages and ranges

updated ranges
2020-09-01 11:10:29 -04:00
9 changed files with 366 additions and 24 deletions

122
package-lock.json generated
View File

@@ -4879,6 +4879,68 @@
"loader-utils": "^1.2.3"
}
},
"@tensorflow-models/coco-ssd": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@tensorflow-models/coco-ssd/-/coco-ssd-1.0.0.tgz",
"integrity": "sha512-j6fpkBWD8f8Qi0jgnJ4SM4uJr7bfq281rRyrUEszO8k/Jfzi6Mnsd9vTtBvv3SPTLYaJGsZHOBjzUsW4z1XWXg=="
},
"@tensorflow/tfjs": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-1.6.0.tgz",
"integrity": "sha512-HXysT6vJ/U8FBbDMfXrlTFwtqj+XCTsEjwDiB0KOed0DF2S3Jb8NPraR0pAQ2iuDZ2uZuEAdG7/EyubDTPz2/Q==",
"requires": {
"@tensorflow/tfjs-converter": "1.6.0",
"@tensorflow/tfjs-core": "1.6.0",
"@tensorflow/tfjs-data": "1.6.0",
"@tensorflow/tfjs-layers": "1.6.0"
}
},
"@tensorflow/tfjs-converter": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-1.6.0.tgz",
"integrity": "sha512-jKu4rwBVAjQAH4+LiPcv0CIuj5uW4PDTb9HvlqcLm/43e7uAd7Qus74Dy82pwVOrGhv3BPD4/GZYrzOKxGbKEQ=="
},
"@tensorflow/tfjs-core": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-1.6.0.tgz",
"integrity": "sha512-b98jn1pjRuEDVNN6/ZQMFhyYV27ZIsG9CcHSMXq1ohX6ALQQB3mwgrMeC2TEVXFl6/L2vOD8W+txuBRGKHnvpg==",
"requires": {
"@types/offscreencanvas": "~2019.3.0",
"@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.30",
"@types/webgl2": "0.0.4",
"node-fetch": "~2.1.2",
"seedrandom": "2.4.3"
},
"dependencies": {
"node-fetch": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
}
}
},
"@tensorflow/tfjs-data": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-1.6.0.tgz",
"integrity": "sha512-UCEzFh05LzkT60+MFoH4dmjT5BpfSJm5atSKQhXZz+pzSAXbbcraBNyqxoEqV9Rj8rOHEmUqkHyPvn4o5GLQYQ==",
"requires": {
"@types/node-fetch": "^2.1.2",
"node-fetch": "~2.1.2"
},
"dependencies": {
"node-fetch": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
}
}
},
"@tensorflow/tfjs-layers": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-1.6.0.tgz",
"integrity": "sha512-Nb6yNJT35vbYK/ZiI7wxLcI9BrvGA7GJ7OL+9/B7Mrsq3gq1A8Pv2ULQhPZOvR0J1zl83AMZAACjuGS+L6nLAA=="
},
"@testing-library/dom": {
"version": "7.21.5",
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-7.21.5.tgz",
@@ -5534,8 +5596,28 @@
"@types/node": {
"version": "12.11.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.11.1.tgz",
"integrity": "sha512-TJtwsqZ39pqcljJpajeoofYRfeZ7/I/OMUQ5pR4q5wOKf2ocrUvBAZUMhWsOvKx3dVc/aaV5GluBivt0sWqA5A==",
"dev": true
"integrity": "sha512-TJtwsqZ39pqcljJpajeoofYRfeZ7/I/OMUQ5pR4q5wOKf2ocrUvBAZUMhWsOvKx3dVc/aaV5GluBivt0sWqA5A=="
},
"@types/node-fetch": {
"version": "2.5.7",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.7.tgz",
"integrity": "sha512-o2WVNf5UhWRkxlf6eq+jMZDu7kjgpgJfl4xVNlvryc95O/6F2ld8ztKX+qu+Rjyet93WAWm5LjeX9H5FGkODvw==",
"requires": {
"@types/node": "*",
"form-data": "^3.0.0"
},
"dependencies": {
"form-data": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.0.tgz",
"integrity": "sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
}
}
}
},
"@types/normalize-package-data": {
"version": "2.4.0",
@@ -5548,6 +5630,11 @@
"resolved": "https://registry.npmjs.org/@types/object-assign/-/object-assign-4.0.30.tgz",
"integrity": "sha1-iUk3HVqZ9Dge4PHfCpt6GH4H5lI="
},
"@types/offscreencanvas": {
"version": "2019.3.0",
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
"integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
},
"@types/prettier": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.0.2.tgz",
@@ -5589,6 +5676,11 @@
"@types/react": "*"
}
},
"@types/seedrandom": {
"version": "2.4.27",
"resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
"integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
},
"@types/stack-utils": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-1.0.1.tgz",
@@ -5609,6 +5701,16 @@
"resolved": "https://registry.npmjs.org/@types/warning/-/warning-3.0.0.tgz",
"integrity": "sha1-DSUBJorY+ZYrdA04fEZU9fjiPlI="
},
"@types/webgl-ext": {
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
"integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
},
"@types/webgl2": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/@types/webgl2/-/webgl2-0.0.4.tgz",
"integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw=="
},
"@types/yargs": {
"version": "13.0.7",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-13.0.7.tgz",
@@ -6444,8 +6546,7 @@
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=",
"dev": true
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"atob": {
"version": "2.1.2",
@@ -8152,7 +8253,6 @@
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dev": true,
"requires": {
"delayed-stream": "~1.0.0"
}
@@ -9124,8 +9224,7 @@
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=",
"dev": true
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
},
"delegates": {
"version": "1.0.0",
@@ -20086,14 +20185,12 @@
"mime-db": {
"version": "1.43.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz",
"integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==",
"dev": true
"integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ=="
},
"mime-types": {
"version": "2.1.26",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz",
"integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==",
"dev": true,
"requires": {
"mime-db": "1.43.0"
}
@@ -24537,6 +24634,11 @@
}
}
},
"seedrandom": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
"integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
},
"seek-bzip": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.5.tgz",

View File

@@ -38,6 +38,10 @@
"@fortawesome/free-regular-svg-icons": "5.7.2",
"@fortawesome/free-solid-svg-icons": "5.8.2",
"@fortawesome/react-fontawesome": "0.1.11",
"@tensorflow-models/coco-ssd": "1.0.0",
"@tensorflow/tfjs": "1.6.0",
"@tensorflow/tfjs-converter": "1.6.0",
"@tensorflow/tfjs-core": "1.6.0",
"babel-polyfill": "6.26.0",
"bowser": "^2.10.0",
"classnames": "2.2.6",

View File

@@ -1,7 +1,9 @@
import React from 'react';
import PropTypes from 'prop-types';
import * as cocoSsd from '@tensorflow-models/coco-ssd';
import CameraPhoto, { FACING_MODES } from 'jslib-html5-camera-photo';
import { injectIntl, intlShape } from '@edx/frontend-platform/i18n';
import { Form, Spinner } from '@edx/paragon';
import shutter from './data/camera-shutter.base64.json';
import messages from './IdVerification.messages';
@@ -11,9 +13,14 @@ class Camera extends React.Component {
super(props, context);
this.cameraPhoto = null;
this.videoRef = React.createRef();
this.canvasRef = React.createRef();
this.setDetection = this.setDetection.bind(this);
this.state = {
trackedObject: null,
dataUri: '',
outlineColor: '#ff3300',
videoHasLoaded: false,
shouldDetect: false,
isFinishedLoadingDetection: true,
};
}
@@ -26,6 +33,97 @@ class Camera extends React.Component {
this.cameraPhoto.stopCamera();
}
setDetection() {
this.setState(
{ shouldDetect: !this.state.shouldDetect },
() => {
if (this.state.shouldDetect) {
this.setState({ isFinishedLoadingDetection: false });
this.startDetection();
}
},
);
}
startDetection() {
setTimeout(() => {
if (this.state.videoHasLoaded) {
const loadModelPromise = cocoSsd.load();
Promise.all([loadModelPromise])
.then((values) => {
this.setState({ isFinishedLoadingDetection: true });
this.detectFromVideoFrame(values[0], this.videoRef.current);
});
} else {
this.setState({ isFinishedLoadingDetection: true });
this.setState({ shouldDetect: false });
// TODO: add error message
}
}, 1000);
}
// Runs one detection pass on the current video frame, draws the results, and
// schedules the next pass via requestAnimationFrame. The loop stops once
// detection is switched off or a photo has been captured (dataUri set).
detectFromVideoFrame = async (model, video) => {
  const predictions = await model.detect(video);
  if (this.state.shouldDetect && !this.state.dataUri) {
    this.showDetections(predictions);
    requestAnimationFrame(() => this.detectFromVideoFrame(model, video));
  }
};
// Draws a colored border on the overlay canvas indicating whether the
// detected person is positioned correctly for the photo being taken.
showDetections = (predictions) => {
  let canvasContext;
  if (predictions.length > 0) {
    canvasContext = this.canvasRef.current.getContext('2d');
    canvasContext.clearRect(0, 0, canvasContext.canvas.width, canvasContext.canvas.height);
  }
  // predictions is an array of objects describing each detected object
  // (coco-ssd detects whole objects such as 'person', not individual faces).
  predictions.forEach((prediction) => {
    if (prediction.class === 'person') {
      // Shift the bounding box origin toward where the face sits within
      // the detected person box. Presumably tuned empirically — TODO confirm.
      const xAdjustment = 70;
      const yAdjustment = 55;
      const x = prediction.bbox[0] - xAdjustment;
      const y = prediction.bbox[1] - yAdjustment;
      const width = prediction.bbox[2];
      let isInPosition;
      if (this.props.isPortrait) {
        isInPosition = this.isInRangeForPortrait(x, y, width);
      } else {
        isInPosition = this.isInRangeForID(x, y, width);
      }
      // Compute the color locally: setState is asynchronous, so reading
      // this.state.outlineColor immediately after setState (as the previous
      // code did) used the PREVIOUS frame's color — the border lagged by one
      // detection pass.
      const outlineColor = isInPosition ? '#00ffff' : '#ff3300';
      this.setState({ outlineColor });
      // Draw the border around the whole camera view.
      canvasContext.strokeStyle = outlineColor;
      canvasContext.lineWidth = 15;
      canvasContext.strokeRect(0, 0, canvasContext.canvas.width, canvasContext.canvas.height);
    }
  });
}
isInRangeForPortrait(x, y, width) {
return x > -80 && x < 70 && y > -20 && y < 80 && width > 300 && width < 650;
}
isInRangeForID(x, y, width) {
return x > -60 && x < 10 && y > 0 && y < 150 && width > 230 && width < 540;
}
setVideoHasLoaded() {
this.setState({ videoHasLoaded: 'true' });
}
takePhoto() {
if (this.state.dataUri) {
return this.reset();
@@ -41,12 +139,15 @@ class Camera extends React.Component {
}
playShutterClick() {
const audio = new Audio('data:audio/mp3;base64,' + shutter.base64);
const audio = new Audio(`data:audio/mp3;base64,${shutter.base64}`);
audio.play();
}
reset() {
this.setState({ dataUri: '' });
if (this.state.shouldDetect) {
this.startDetection();
}
}
render() {
@@ -54,19 +155,39 @@ class Camera extends React.Component {
? 'do-transition camera-flash'
: 'camera-flash';
return (
<div className='camera-outer-wrapper shadow'>
<div className='camera-wrapper'>
<div className="camera-outer-wrapper shadow">
<Form.Group style={{ textAlign: 'left', padding: '0.5rem', marginBottom: '0.5rem' }} >
<Form.Check
id="videoDetection"
name="videoDetection"
label={this.props.intl.formatMessage(messages['id.verification.photo.enable.detection'])}
aria-describedby="videoDetectionHelpText"
checked={this.state.shouldDetect}
onChange={this.setDetection}
style={{ padding: '0rem', marginLeft: '1.25rem', float: this.state.isFinishedLoadingDetection ? 'none' : 'left' }}
/>
{!this.state.isFinishedLoadingDetection && <Spinner animation="border" variant="primary" style={{ marginLeft: '0.5rem' }} data-testid="spinner" />}
<Form.Text id="videoDetectionHelpText" data-testid="videoDetectionHelpText">
{this.props.isPortrait
? this.props.intl.formatMessage(messages['id.verification.photo.enable.detection.portrait.help.text'])
: this.props.intl.formatMessage(messages['id.verification.photo.enable.detection.id.help.text'])}
</Form.Text>
</Form.Group>
<div className="camera-wrapper">
<div className={cameraFlashClass} />
<video
ref={this.videoRef}
autoPlay={true}
className='camera-video'
data-testid="video"
autoPlay
className="camera-video"
onLoadedData={() => { this.setVideoHasLoaded(); }}
style={{ display: this.state.dataUri ? 'none' : 'block' }}
/>
<canvas ref={this.canvasRef} data-testid="detection-canvas" className="canvas-video" style={{ display: !this.state.shouldDetect || this.state.dataUri ? 'none' : 'block' }} height="375" width="500" />
<img
alt='imgCamera'
alt="imgCamera"
src={this.state.dataUri}
className='camera-video'
className="camera-video"
style={{ display: this.state.dataUri ? 'block' : 'none' }}
/>
</div>
@@ -76,7 +197,7 @@ class Camera extends React.Component {
'btn-outline-primary'
: 'btn-primary'
}`}
accessKey='c'
accessKey="c"
onClick={() => {
this.takePhoto();
}}
@@ -93,6 +214,7 @@ class Camera extends React.Component {
Camera.propTypes = {
intl: intlShape.isRequired,
onImageCapture: PropTypes.func.isRequired,
isPortrait: PropTypes.bool.isRequired,
};
export default injectIntl(Camera);

View File

@@ -81,6 +81,21 @@ const messages = defineMessages({
defaultMessage: 'Retake Photo',
description: 'Button to retake photo.',
},
'id.verification.photo.enable.detection': {
id: 'id.verification.photo.enable.detection',
defaultMessage: 'Enable Face Detection',
description: 'Text label for the checkbox to enable face detection.',
},
'id.verification.photo.enable.detection.portrait.help.text': {
id: 'id.verification.photo.enable.detection.portrait.help.text',
defaultMessage: 'If checked, a border will appear around the camera view. Your face can be seen clearly if the border is blue. If your face is not in a good position or undetectable, the border will be red.',
description: 'Help text that appears for enabling face detection on the portrait photo panel.',
},
'id.verification.photo.enable.detection.id.help.text': {
id: 'id.verification.photo.enable.detection.id.help.text',
defaultMessage: 'If checked, a border will appear around the camera view. The face can be seen clearly if the border is blue. If the face is not in a good position or undetectable, the border will be red.',
description: 'Help text that appears for enabling face detection on the ID photo panel.',
},
'id.verification.camera.access.title': {
id: 'id.verification.camera.access.title',
defaultMessage: 'Camera Permissions',

View File

@@ -58,7 +58,7 @@ function IdVerificationContextProvider({ children }) {
tracks.forEach(track => track.stop());
setMediaStream(null);
}
}
},
};
// Call verification status endpoint to check whether we can verify.

View File

@@ -13,7 +13,7 @@
}
}
.form-check {
padding: 0.5rem 0.5rem 1rem;
padding: 0.5rem 0.5rem 1rem;
.form-check-label {
margin-left: 0.5rem;
padding-top: 0.2rem;
@@ -52,6 +52,13 @@
width: 100%;
}
.canvas-video {
width: 100%;
position: absolute;
top: 0;
left: 0;
}
.camera-btn {
margin: 10px;
}
@@ -68,4 +75,4 @@
opacity: 0;
background: white;
}
}
}

View File

@@ -24,7 +24,7 @@ function TakeIdPhotoPanel(props) {
<p>
{props.intl.formatMessage(messages['id.verification.id.photo.instructions.camera'])}
</p>
<Camera onImageCapture={setIdPhotoFile} />
<Camera onImageCapture={setIdPhotoFile} isPortrait={false} />
</div>
<CameraHelp />
<div className="action-row" style={{ visibility: idPhotoFile ? 'unset' : 'hidden' }}>

View File

@@ -33,7 +33,7 @@ function TakePortraitPhotoPanel(props) {
<p>
{props.intl.formatMessage(messages['id.verification.portrait.photo.instructions.camera'])}
</p>
<Camera onImageCapture={setFacePhotoFile} />
<Camera onImageCapture={setFacePhotoFile} isPortrait />
</div>
) : (
<div>

View File

@@ -4,10 +4,12 @@ import { createMemoryHistory } from 'history';
import '@testing-library/jest-dom/extend-expect';
import { render, cleanup, screen, act, fireEvent } from '@testing-library/react';
import { injectIntl, IntlProvider } from '@edx/frontend-platform/i18n';
import * as cocoSsd from '@tensorflow-models/coco-ssd';
import { IdVerificationContext } from '../IdVerificationContext';
import Camera from '../Camera';
jest.mock('jslib-html5-camera-photo');
jest.mock('@tensorflow-models/coco-ssd');
window.HTMLMediaElement.prototype.play = () => {};
@@ -19,6 +21,13 @@ describe('SubmittedPanel', () => {
const defaultProps = {
intl: {},
onImageCapture: jest.fn(),
isPortrait: true,
};
const idProps = {
intl: {},
onImageCapture: jest.fn(),
isPortrait: false,
};
const contextValue = {};
@@ -42,4 +51,87 @@ describe('SubmittedPanel', () => {
fireEvent.click(button);
expect(defaultProps.onImageCapture).toHaveBeenCalled();
});
// Portrait mode help text addresses the user directly ("Your face").
it('shows correct help text for portrait photo capture', async () => {
await act(async () => render((
<Router history={history}>
<IntlProvider locale="en">
<IdVerificationContext.Provider value={contextValue}>
<IntlCamera {...defaultProps} />
</IdVerificationContext.Provider>
</IntlProvider>
</Router>
)));
const helpText = screen.getByTestId('videoDetectionHelpText');
expect(helpText.textContent).toEqual(expect.stringContaining('Your face can be seen clearly'));
});
// ID mode help text refers to the face on the document ("The face").
it('shows correct help text for id photo capture', async () => {
await act(async () => render((
<Router history={history}>
<IntlProvider locale="en">
<IdVerificationContext.Provider value={contextValue}>
<IntlCamera {...idProps} />
</IdVerificationContext.Provider>
</IntlProvider>
</Router>
)));
const helpText = screen.getByTestId('videoDetectionHelpText');
expect(helpText.textContent).toEqual(expect.stringContaining('The face can be seen clearly'));
});
// The spinner should appear while the coco-ssd model is loading.
it('shows spinner when loading face detection', async () => {
cocoSsd.load = jest.fn().mockResolvedValue({ detect: jest.fn().mockResolvedValue([]) });
await act(async () => render((
<Router history={history}>
<IntlProvider locale="en">
<IdVerificationContext.Provider value={contextValue}>
<IntlCamera {...defaultProps} />
</IdVerificationContext.Provider>
</IntlProvider>
</Router>
)));
// Fire loadedData so the component records videoHasLoaded before the
// detection checkbox is toggled.
await fireEvent.loadedData(screen.queryByTestId('video'));
const checkbox = await screen.findByLabelText('Enable Face Detection');
fireEvent.click(checkbox);
expect(screen.queryByTestId('spinner')).toBeDefined();
});
// The overlay canvas is hidden until detection is enabled.
it('canvas is visible when detection is enabled', async () => {
cocoSsd.load = jest.fn().mockResolvedValue({ detect: jest.fn().mockResolvedValue([]) });
await act(async () => render((
<Router history={history}>
<IntlProvider locale="en">
<IdVerificationContext.Provider value={contextValue}>
<IntlCamera {...defaultProps} />
</IdVerificationContext.Provider>
</IntlProvider>
</Router>
)));
await fireEvent.loadedData(screen.queryByTestId('video'));
expect(screen.queryByTestId('detection-canvas')).toHaveStyle('display:none');
const checkbox = await screen.findByLabelText('Enable Face Detection');
await fireEvent.click(checkbox);
expect(screen.queryByTestId('detection-canvas')).toHaveStyle('display:block');
});
it('blazeface is called when detection is enabled', async () => {
cocoSsd.load = jest.fn().mockResolvedValue({ detect: jest.fn().mockResolvedValue([]) });
await act(async () => render((
<Router history={history}>
<IntlProvider locale="en">
<IdVerificationContext.Provider value={contextValue}>
<IntlCamera {...defaultProps} />
</IdVerificationContext.Provider>
</IntlProvider>
</Router>
)));
await fireEvent.loadedData(screen.queryByTestId('video'));
const checkbox = await screen.findByLabelText('Enable Face Detection');
await fireEvent.click(checkbox);
setTimeout(() => { expect(cocoSsd.load).toHaveBeenCalled(); }, 2000);
});
});