test.vue

<template>
  <div id="facelogin">
    <h1 class="title is-1">{{ FaceisDetected }}</h1>
    <!-- <p>{{ FaceisDetected }}</p> -->
    <div class="content-cam">
      <div class="camera-wrp sec">
        <video width="320" height="320" ref="videoDom" id="video_cam" preload autoplay loop muted></video>
        <canvas width="320" height="320" ref="canvasDOM" id="face_detect"></canvas>
        <div class="control-btn"></div>
      </div>
      <div class="images-wrp sec">
        <!-- <p class="title is-5">Image taken</p> -->
        <div
          :class="`img-item img-item-${index}`"
          v-for="(image, index) in images"
          :key="`img-wrp-${index}`"
          :style="`background-image: url('${image}')`"
        ></div>
      </div>
    </div>
  </div>
</template>
<script>
import tracking from "@/assets/js/tracking-min.js";
import "@/assets/js/face-min.js";

export default {
  name: "facelogin",
  data() {
    return {
      count: 0,
      // Status message shown above the camera: "Please keep your face centered in the frame"
      isdetected: "请您保持脸部在画面中央",
      videoEl: {},
      canvasEL: {},
      images: [],
      trackCcv: false,
      trackTracking: false,
      autoCaptureTrackTraking: false,
      userMediaConstraints: {
        audio: false,
        video: {
          // ideal: the resolution the browser should try to provide
          width: {
            min: 320,
            ideal: 1280,
            max: 1920
          },
          height: {
            min: 240,
            ideal: 720,
            max: 1080
          },
          // frameRate: on bandwidth-constrained connections a lower frame rate may be preferable
          frameRate: {
            min: 15,
            ideal: 30,
            max: 60
          },
          // Use the front-facing ("user") camera
          facingMode: "user"
        }
      }
    };
  },
  created() {
    this.changeView();
  },
  computed: {
    FaceisDetected() {
      return this.isdetected;
    }
  },
  mounted() {
    // getUserMedia handles camera input. Some older browsers only expose it
    // behind a vendor prefix, so collect whichever variant exists. Note that
    // init() below actually uses the standard navigator.mediaDevices API.
    navigator.getMedia =
      navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;
    this.init();
  },
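  // Added sketch, not in the original source: stopMediaStreamTrack() below is
  // never called from this component, so the camera stream would stay live
  // after leaving the view. A teardown hook like this is one plausible place
  // to release it.
  beforeDestroy() {
    this.stopMediaStreamTrack();
  },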
  methods: {
    async init() {
      this.videoEl = this.$refs.videoDom;
      this.canvasEL = this.$refs.canvasDOM;
      await navigator.mediaDevices
        .getUserMedia(this.userMediaConstraints)
        .then(this.getMediaStreamSuccess)
        .catch(this.getMediaStreamError);
      await this.onPlay();
    },
    async onPlay() {
      // debugHelper.log("onPlay");
      this.onTrackTracking();
    },
    changeView() {
      // setTitle / setBackDisabled / setBackIcon and the global msgbus are
      // assumed to be provided by a mixin and the surrounding app shell;
      // they are not defined in this file.
      this.setTitle("刷脸登陆"); // "Face login"
      this.setBackDisabled(false);
      this.setBackIcon("arrow_back");
      msgbus.vm.setBottomNavVisible(false);
      msgbus.vm.setBottomBtnVisible(false);
      msgbus.vm.setMsgInputVisible({ value: false });
    },
    onTrackTracking() {
      const context = this;
      const video = this.videoEl;
      const canvas = this.canvasEL;
      const canvasContext = canvas.getContext("2d");
      const tracker = new window.tracking.ObjectTracker("face");
      // Reset the <video> element before tracking.js re-attaches the camera stream.
      video.pause();
      video.src = "";
      tracker.setInitialScale(4);
      tracker.setStepSize(2);
      tracker.setEdgesDensity(0.1);
      window.tracking.track("#video_cam", tracker, { camera: true });
      tracker.on("track", function(event) {
        const { autoCaptureTrackTraking } = context; // currently unused
        canvasContext.clearRect(0, 0, canvas.width, canvas.height);
        // Draw a rectangle around every detected face.
        event.data.forEach(function({ x, y, width, height }) {
          canvasContext.strokeStyle = "#a64ceb";
          canvasContext.strokeRect(x, y, width, height);
          canvasContext.font = "11px Helvetica";
          canvasContext.fillStyle = "#fff";
        });
        // Count consecutive frames with a detection; after more than 10 hits,
        // treat the face as confirmed.
        if (event.data.length > 0 && context.count <= 10) {
          if (context.count < 0) context.count = 0;
          context.count += 1;
          // debugHelper.log(context.count)
          if (context.count > 10) {
            // "Face detected, logging in"
            context.isdetected = "已检测到人脸,正在登录";
            // context.$router.push({ name: 'pwdlogin' })
          }
        } else {
          context.count -= 1;
          // "Please keep your face centered in the frame"
          if (context.count < 0) context.isdetected = "请您保持脸部在画面中央";
        }
      });
    },
    // Download a captured image (data URL) as a PNG file.
    onDownloadFile(item) {
      const link = document.createElement("a");
      link.href = item;
      link.download = `cahyo-${new Date().toISOString()}.png`;
      link.click();
      link.remove();
    },
    // Grab the current video frame, crop it to a centered square and store it
    // as a data URL in this.images.
    onTakeCam() {
      const canvas = document.createElement("canvas");
      const video = this.$el.querySelector("#video_cam");
      const canvasContext = canvas.getContext("2d");
      if (video.videoWidth && video.videoHeight) {
        const isBiggerW = video.videoWidth > video.videoHeight;
        const fixVidSize = isBiggerW ? video.videoHeight : video.videoWidth;
        let offsetLeft = 0;
        let offsetTop = 0;
        if (isBiggerW) offsetLeft = (video.videoWidth - fixVidSize) / 2;
        else offsetTop = (video.videoHeight - fixVidSize) / 2;
        // Output a 300x300 px canvas.
        canvas.width = canvas.height = 300;
        const { width, height } = canvas;
        canvasContext.drawImage(
          video,
          offsetLeft,
          offsetTop,
          fixVidSize,
          fixVidSize,
          0,
          0,
          width,
          height
        );
        const image = canvas.toDataURL("image/png");
        this.images.push(image);
      }
    },
    // Run face detection on a captured image and overlay a box on the
    // corresponding .img-item element.
    onDetectFace(param, index) {
      const imgItem = document.querySelector(`.img-item-${index}`);
      // plot() is exposed globally and appends an absolutely positioned
      // rectangle over the detected face.
      window.plot = function(x, y, w, h) {
        const rect = document.createElement("div");
        imgItem.appendChild(rect);
        rect.classList.add("rect");
        rect.style.width = w + "px";
        rect.style.height = h + "px";
        rect.style.left = x + "px";
        rect.style.top = y + "px";
        rect.style.border = "2px solid yellow";
        rect.style.position = "absolute";
      };
      const image = new Image();
      image.src = param;
      const tracker = new tracking.ObjectTracker("face");
      tracker.setStepSize(1.7);
      tracking.track(image, tracker);
      tracker.on("track", function(event) {
        event.data.forEach(function(rect) {
          window.plot(rect.x, rect.y, rect.width, rect.height);
        });
      });
    },
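    // Usage sketch (assumption; these calls do not appear in the original file):
    // once onTakeCam() has pushed a data URL into this.images, the newest
    // capture could be saved and annotated with something like:
    //   const i = this.images.length - 1;
    //   this.onDownloadFile(this.images[i]);
    //   this.onDetectFace(this.images[i], i);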
    getMediaStreamSuccess(stream) {
      window.stream = stream; // make the stream available to the browser console
      this.videoEl.srcObject = stream;
      // debugHelper.log("getMediaStreamSuccess1");
      // this.$store.commit('setVideoCanvasObject', this.videoEl)
      // debugHelper.log("getMediaStreamSuccess2");
    },
    // Called when acquiring the video media stream fails.
    getMediaStreamError(error) {
      // "Failed to get the video media stream"
      alert("视频媒体流获取错误" + error);
    },
    // Stop the media stream and release the camera.
    stopMediaStreamTrack() {
      clearInterval(this.timeInterval); // this.timeInterval is never set in this component
      if (typeof window.stream === "object") {
        this.videoEl.srcObject = null;
        // this.$store.commit('setVideoCanvasObject', '')
        window.stream.getTracks().forEach(track => track.stop());
      }
    }
  }
};
</script>