Skip to content

Commit

Permalink
Realtime video has been corrected with UI updates
Browse files Browse the repository at this point in the history
  • Loading branch information
jivebreaddev authored Oct 17, 2021
1 parent 4bc0802 commit 0fa5f4c
Show file tree
Hide file tree
Showing 2 changed files with 234 additions and 46 deletions.
Binary file modified AI(BE)/bullseyes/single_image.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
280 changes: 234 additions & 46 deletions AI(FE)/web-admin/src/FaceApi.js
Original file line number Diff line number Diff line change
@@ -1,14 +1,43 @@

import React, { useState, useEffect, useRef } from "react";
import ReactDOM from "react-dom";
import * as faceApi from "face-api.js";
import axios from 'axios';
import InitialPosting from './myDataProvider';
import axios from 'axios';
import "./styles.css";
import Grid from '@material-ui/core/Grid';
import { Card, CardContent, CardMedia, CardHeader } from "@material-ui/core";
import { makeStyles } from '@material-ui/core/styles';

const ImageDetection = () => {
// JSS style rules for the ImageDetection component (Material-UI makeStyles hook).
// Every rule uses absolute positioning to overlay elements on the video layout;
// pixel values are tuned to the fixed page layout — TODO confirm against final design.
const useStyles = makeStyles({
  // Card that holds the captured face snapshot, pinned to the upper-right area.
  video: {
    position: "absolute",
    right: "675px",
    top: "200px",
    width: "204px",
    height: "202px",
  },
  // Card that displays the identified user's details, rendered below the snapshot.
  text: {
    position: "absolute",
    right: "600px",
    top: "450px",
    width: "354px",
    height: "202px",
  },
  // Rounded thumbnail of the extracted face shown inside the snapshot card.
  picture: {
    position: "absolute",
    borderRadius: "30px", // camelCase per JSS convention (was kebab-case 'border-radius'; both compile to the same CSS)
    border: "1px solid",
    left: "24px",
    top: "21px",
    width: "156px",
    height: "156px",
  },
});

const [date, setDate] = useState();
const ImageDetection = (prop) => {
const classes = useStyles();
const [identity, setIdentity] = useState("");
const [date, setDate] = useState();
const displayWidth = 450;
const displayHeight = 360;
const [pic, setPic] = useState();
Expand All @@ -27,6 +56,7 @@ const ImageDetection = () => {
);
setInitialized(true);
setDate(new Date());

};
models();
}, []);
Expand All @@ -37,16 +67,17 @@ const ImageDetection = () => {
];
let faceImages = await faceApi.extractFaces(Image, extractRegions);
faceImages.forEach((canvas) => {
setPic(canvas.toDataURL());
setPic(canvas.toDataURL('image/jpeg', 1));

setDate(new Date());
axios({
method: 'post',
url: "https://osamhack2021-ai-web-bullseyes-bullseyes-7v5x5w6jwfx5xj-8000.githubpreview.dev/accessusers/",
data: {
photourl: canvas.toDataURL(), time: date.toISOString()
photourl: canvas.toDataURL('image/jpeg', 1), time: date.toISOString()
}
}).then(function (response) {
console.log(response.data);
setIdentity(response.data);
});
});
Expand All @@ -68,57 +99,214 @@ const ImageDetection = () => {
};
const VideoPlay = () => {
setInterval(async () => {
if (initialized) {
setInitialized(false);
canvasRef.current.innerHTML = faceApi.createCanvasFromMedia(
videoRef.current
);
}
try {
if (initialized) {
setInitialized(false);
canvasRef.current.innerHTML = faceApi.createCanvasFromMedia(
videoRef.current
);
}

faceApi.matchDimensions(canvasRef.current, videoSize);

faceApi.matchDimensions(canvasRef.current, videoSize);
const detections = await faceApi.detectAllFaces(videoRef.current, option);

const detections = await faceApi.detectAllFaces(videoRef.current, option);
if (detections[0]) {
extractFaces(videoRef.current, detections[0].box);
}
const resizedDectect = faceApi.resizeResults(detections, videoSize);
canvasRef.current
.getContext("2d")
.clearRect(0, 0, displayWidth, displayHeight);
faceApi.draw.drawDetections(canvasRef.current, resizedDectect);
const box = { x: 120, y: 100, width: 200, height: 200 };

const drawOptions = {
label: "Put Your Face Here",
lineWidth: 5
};
const drawBox = new faceApi.draw.DrawBox(box, drawOptions);
drawBox.draw(canvasRef.current);
}
catch (err) {

if (detections[0]) {
extractFaces(videoRef.current, detections[0].box);
}
const resizedDectect = faceApi.resizeResults(detections, videoSize);
canvasRef.current
.getContext("2d")
.clearRect(0, 0, displayWidth, displayHeight);
faceApi.draw.drawDetections(canvasRef.current, resizedDectect);
const box = { x: 161, y: 180, width: 128, height: 128 };

const drawOptions = {
label: "Put Your Face Here",
lineWidth: 5
};
const drawBox = new faceApi.draw.DrawBox(box, drawOptions);
drawBox.draw(canvasRef.current);
}, 5000);
};
// TODO: consolidate styling — move these inline/JSS rules into the CSS file, or commit to a single styling approach
return (
<div>
<div className="Videodiv">
{/* <video ref={videoRef} autoPlay muted className="Video" onPlay={VideoPlay} src="media/example.mp4" /> */}

<video
className="Video"
height={displayWidth}
width={displayWidth}
ref={videoRef}
autoPlay
muted
onPlay={VideoPlay}
/>
<canvas className="Canvas" ref={canvasRef} />
</div>
<br></br>
<img className="Canvas" src={pic} />
<Grid container spacing={2}>
<Grid item xs={8}>

<div className="Videodiv">
<video
className="Video"
height={displayWidth}
width={displayWidth}
ref={videoRef}
autoPlay
muted
onPlay={VideoPlay}
/>
<canvas className="Canvas" ref={canvasRef} />
</div>

</Grid>
<Card className={classes.video}>
<CardMedia
className={classes.picture}
component="img"
image={pic}
/>
</Card>
<Card className={classes.text}>
{typeof(identity)=="string" ? "" :
<div className="fonts">
<p>
<div className="head">{identity.data.name}</div>
<div className="opacity">{identity.data.company}</div>
<div className="opacity">{identity.data.rank}</div>
<div className="opacity">{
identity.data.time.substr(0,10)
+ " " +
identity.data.time.substr(11,8)
}</div>
<div className="opacity">{identity.data.place}</div>
<div className="opacity">{identity.data.altid}</div>
</p>
</div>
}
</Card>
</Grid>
<div className="title1">비디오</div>
<div className="title2">사용자 정보</div>
</div>
);

};
export default ImageDetection;
// const ImageDetection = () => {

// const [date, setDate] = useState();
// const [identity, setIdentity] = useState("");
// const displayWidth = 450;
// const displayHeight = 360;
// const [pic, setPic] = useState();
// const [initialized, setInitialized] = useState(false);
// const canvasRef = useRef();
// const videoRef = useRef();

// const option = new faceApi.TinyFaceDetectorOptions({
// inputSize: 256,
// scoreThreshold: 0.8
// });
// useEffect(() => {
// const models = async () => {
// Promise.all([faceApi.nets.tinyFaceDetector.load("/models/")]).then(
// setupVideo
// );
// setInitialized(true);
// setDate(new Date());
// };
// models();
// }, []);
// async function extractFaces(Image, box) {
// if (box) {
// const extractRegions = [
// new faceApi.Rect(box.x, box.y, box.width, box.height)
// ];
// let faceImages = await faceApi.extractFaces(Image, extractRegions);
// faceImages.forEach((canvas) => {
// setPic(canvas.toDataURL());

// setDate(new Date());
// axios({
// method: 'post',
// url: "https://osamhack2021-ai-web-bullseyes-bullseyes-7v5x5w6jwfx5xj-8000.githubpreview.dev/accessusers/",
// data: {
// photourl: canvas.toDataURL(), time: date.toISOString()
// }
// }).then(function (response) {
// setIdentity(response.data);
// });
// });


// }
// }
// const setupVideo = () => {
// navigator.mediaDevices
// .getUserMedia({
// video: { facingMode: "user" }
// })
// .then((stream) => (videoRef.current.srcObject = stream));
// // navigator.getUserMedia().then(stream => videoRef.current.srcObject = stream).catch(console.log)
// };
// const videoSize = {
// width: displayWidth,
// height: displayHeight
// };
// const VideoPlay = () => {
// setInterval(async () => {
// if (initialized) {
// setInitialized(false);
// canvasRef.current.innerHTML = faceApi.createCanvasFromMedia(
// videoRef.current
// );
// }

// faceApi.matchDimensions(canvasRef.current, videoSize);

// const detections = await faceApi.detectAllFaces(videoRef.current, option);

// if (detections[0]) {
// extractFaces(videoRef.current, detections[0].box);
// }
// const resizedDectect = faceApi.resizeResults(detections, videoSize);
// canvasRef.current
// .getContext("2d")
// .clearRect(0, 0, displayWidth, displayHeight);
// faceApi.draw.drawDetections(canvasRef.current, resizedDectect);
// const box = { x: 161, y: 180, width: 128, height: 128 };

// const drawOptions = {
// label: "Put Your Face Here",
// lineWidth: 5
// };
// const drawBox = new faceApi.draw.DrawBox(box, drawOptions);
// drawBox.draw(canvasRef.current);
// }, 5000);
// };
// // styling to css file? or other methods
// return (
// <div>
// <div className="Videodiv">
// {/* <video ref={videoRef} autoPlay muted className="Video" onPlay={VideoPlay} src="media/example.mp4" /> */}


// <canvas className="Canvas" ref={canvasRef} />
// </div>
// <br></br>
// <img className="Canvas" src={pic} />
// </div>
// );

// };








export default ImageDetection;
// <video
// className="Video"
// height={displayWidth}
// width={displayWidth}
// ref={videoRef}
// autoPlay
// muted
// onPlay={VideoPlay}
// />

0 comments on commit 0fa5f4c

Please sign in to comment.