// tracking.js (~103 KB)
  1. /**
  2. * tracking - A modern approach for Computer Vision on the web.
  3. * @author Eduardo Lundgren <edu@rdo.io>
  4. * @version v1.1.3
  5. * @link http://trackingjs.com
  6. * @license BSD
  7. */
  8. (function(window, undefined) {
  9. window.tracking = window.tracking || {};
  10. /**
  11. * Inherit the prototype methods from one constructor into another.
  12. *
  13. * Usage:
  14. * <pre>
  15. * function ParentClass(a, b) { }
  16. * ParentClass.prototype.foo = function(a) { }
  17. *
  18. * function ChildClass(a, b, c) {
  19. * tracking.base(this, a, b);
  20. * }
  21. * tracking.inherits(ChildClass, ParentClass);
  22. *
  23. * var child = new ChildClass('a', 'b', 'c');
  24. * child.foo();
  25. * </pre>
  26. *
  27. * @param {Function} childCtor Child class.
  28. * @param {Function} parentCtor Parent class.
  29. */
  30. tracking.inherits = function(childCtor, parentCtor) {
  31. function TempCtor() {
  32. }
  33. TempCtor.prototype = parentCtor.prototype;
  34. childCtor.superClass_ = parentCtor.prototype;
  35. childCtor.prototype = new TempCtor();
  36. childCtor.prototype.constructor = childCtor;
  37. /**
  38. * Calls superclass constructor/method.
  39. *
  40. * This function is only available if you use tracking.inherits to express
  41. * inheritance relationships between classes.
  42. *
  43. * @param {!object} me Should always be "this".
  44. * @param {string} methodName The method name to call. Calling superclass
  45. * constructor can be done with the special string 'constructor'.
  46. * @param {...*} var_args The arguments to pass to superclass
  47. * method/constructor.
  48. * @return {*} The return value of the superclass method/constructor.
  49. */
  50. childCtor.base = function(me, methodName) {
  51. var args = Array.prototype.slice.call(arguments, 2);
  52. return parentCtor.prototype[methodName].apply(me, args);
  53. };
  54. };
  55. /**
  56. * Captures the user camera when tracking a video element and set its source
  57. * to the camera stream.
  58. * @param {HTMLVideoElement} element Canvas element to track.
  59. * @param {object} opt_options Optional configuration to the tracker.
  60. */
  61. tracking.initUserMedia_ = function(element, opt_options) {
  62. window.navigator.mediaDevices.getUserMedia({
  63. video: true,
  64. audio: (opt_options && opt_options.audio) ? true : false,
  65. }).then(function(stream) {
  66. element.srcObject = stream;
  67. }).catch(function(err) {
  68. throw Error('Cannot capture user camera.');
  69. });
  70. };
  71. /**
  72. * Tests whether the object is a dom node.
  73. * @param {object} o Object to be tested.
  74. * @return {boolean} True if the object is a dom node.
  75. */
  76. tracking.isNode = function(o) {
  77. return o.nodeType || this.isWindow(o);
  78. };
  79. /**
  80. * Tests whether the object is the `window` object.
  81. * @param {object} o Object to be tested.
  82. * @return {boolean} True if the object is the `window` object.
  83. */
  84. tracking.isWindow = function(o) {
  85. return !!(o && o.alert && o.document);
  86. };
  87. /**
  88. * Selects a dom node from a CSS3 selector using `document.querySelector`.
  89. * @param {string} selector
  90. * @param {object} opt_element The root element for the query. When not
  91. * specified `document` is used as root element.
  92. * @return {HTMLElement} The first dom element that matches to the selector.
  93. * If not found, returns `null`.
  94. */
  95. tracking.one = function(selector, opt_element) {
  96. if (this.isNode(selector)) {
  97. return selector;
  98. }
  99. return (opt_element || document).querySelector(selector);
  100. };
  101. /**
  102. * Tracks a canvas, image or video element based on the specified `tracker`
  103. * instance. This method extract the pixel information of the input element
  104. * to pass to the `tracker` instance. When tracking a video, the
  105. * `tracker.track(pixels, width, height)` will be in a
  106. * `requestAnimationFrame` loop in order to track all video frames.
  107. *
  108. * Example:
  109. * var tracker = new tracking.ColorTracker();
  110. *
  111. * tracking.track('#video', tracker);
  112. * or
  113. * tracking.track('#video', tracker, { camera: true });
  114. *
  115. * tracker.on('track', function(event) {
  116. * // console.log(event.data[0].x, event.data[0].y)
  117. * });
  118. *
  119. * @param {HTMLElement} element The element to track, canvas, image or
  120. * video.
  121. * @param {tracking.Tracker} tracker The tracker instance used to track the
  122. * element.
  123. * @param {object} opt_options Optional configuration to the tracker.
  124. */
  125. tracking.track = function(element, tracker, opt_options) {
  126. element = tracking.one(element);
  127. if (!element) {
  128. throw new Error('Element not found, try a different element or selector.');
  129. }
  130. if (!tracker) {
  131. throw new Error('Tracker not specified, try `tracking.track(element, new tracking.FaceTracker())`.');
  132. }
  133. switch (element.nodeName.toLowerCase()) {
  134. case 'canvas':
  135. return this.trackCanvas_(element, tracker, opt_options);
  136. case 'img':
  137. return this.trackImg_(element, tracker, opt_options);
  138. case 'video':
  139. if (opt_options) {
  140. if (opt_options.camera) {
  141. this.initUserMedia_(element, opt_options);
  142. }
  143. }
  144. return this.trackVideo_(element, tracker, opt_options);
  145. default:
  146. throw new Error('Element not supported, try in a canvas, img, or video.');
  147. }
  148. };
  149. /**
  150. * Tracks a canvas element based on the specified `tracker` instance and
  151. * returns a `TrackerTask` for this track.
  152. * @param {HTMLCanvasElement} element Canvas element to track.
  153. * @param {tracking.Tracker} tracker The tracker instance used to track the
  154. * element.
  155. * @param {object} opt_options Optional configuration to the tracker.
  156. * @return {tracking.TrackerTask}
  157. * @private
  158. */
  159. tracking.trackCanvas_ = function(element, tracker) {
  160. var self = this;
  161. var task = new tracking.TrackerTask(tracker);
  162. task.on('run', function() {
  163. self.trackCanvasInternal_(element, tracker);
  164. });
  165. return task.run();
  166. };
  167. /**
  168. * Tracks a canvas element based on the specified `tracker` instance. This
  169. * method extract the pixel information of the input element to pass to the
  170. * `tracker` instance.
  171. * @param {HTMLCanvasElement} element Canvas element to track.
  172. * @param {tracking.Tracker} tracker The tracker instance used to track the
  173. * element.
  174. * @param {object} opt_options Optional configuration to the tracker.
  175. * @private
  176. */
  177. tracking.trackCanvasInternal_ = function(element, tracker) {
  178. var width = element.width;
  179. var height = element.height;
  180. var context = element.getContext('2d');
  181. var imageData = context.getImageData(0, 0, width, height);
  182. tracker.track(imageData.data, width, height);
  183. };
  184. /**
  185. * Tracks a image element based on the specified `tracker` instance. This
  186. * method extract the pixel information of the input element to pass to the
  187. * `tracker` instance.
  188. * @param {HTMLImageElement} element Canvas element to track.
  189. * @param {tracking.Tracker} tracker The tracker instance used to track the
  190. * element.
  191. * @param {object} opt_options Optional configuration to the tracker.
  192. * @private
  193. */
  194. tracking.trackImg_ = function(element, tracker) {
  195. var width = element.width;
  196. var height = element.height;
  197. var canvas = document.createElement('canvas');
  198. canvas.width = width;
  199. canvas.height = height;
  200. var task = new tracking.TrackerTask(tracker);
  201. task.on('run', function() {
  202. tracking.Canvas.loadImage(canvas, element.src, 0, 0, width, height, function() {
  203. tracking.trackCanvasInternal_(canvas, tracker);
  204. });
  205. });
  206. return task.run();
  207. };
  208. /**
  209. * Tracks a video element based on the specified `tracker` instance. This
  210. * method extract the pixel information of the input element to pass to the
  211. * `tracker` instance. The `tracker.track(pixels, width, height)` will be in
  212. * a `requestAnimationFrame` loop in order to track all video frames.
  213. * @param {HTMLVideoElement} element Canvas element to track.
  214. * @param {tracking.Tracker} tracker The tracker instance used to track the
  215. * element.
  216. * @param {object} opt_options Optional configuration to the tracker.
  217. * @private
  218. */
  219. tracking.trackVideo_ = function(element, tracker) {
  220. var canvas = document.createElement('canvas');
  221. var context = canvas.getContext('2d');
  222. var width;
  223. var height;
  224. var resizeCanvas_ = function() {
  225. width = element.offsetWidth;
  226. height = element.offsetHeight;
  227. canvas.width = width;
  228. canvas.height = height;
  229. };
  230. resizeCanvas_();
  231. element.addEventListener('resize', resizeCanvas_);
  232. var requestId;
  233. var requestAnimationFrame_ = function() {
  234. requestId = window.requestAnimationFrame(function() {
  235. if (element.readyState === element.HAVE_ENOUGH_DATA) {
  236. try {
  237. // Firefox v~30.0 gets confused with the video readyState firing an
  238. // erroneous HAVE_ENOUGH_DATA just before HAVE_CURRENT_DATA state,
  239. // hence keep trying to read it until resolved.
  240. context.drawImage(element, 0, 0, width, height);
  241. } catch (err) {}
  242. tracking.trackCanvasInternal_(canvas, tracker);
  243. }
  244. requestAnimationFrame_();
  245. });
  246. };
  247. var task = new tracking.TrackerTask(tracker);
  248. task.on('stop', function() {
  249. window.cancelAnimationFrame(requestId);
  250. });
  251. task.on('run', function() {
  252. requestAnimationFrame_();
  253. });
  254. return task.run();
  255. };
// Browser polyfills
//===================
// Normalize vendor-prefixed APIs onto their standard names so the rest of
// the library can use `window.URL` and `navigator.getUserMedia` directly.
if (!window.URL) {
  window.URL = window.URL || window.webkitURL || window.msURL || window.oURL;
}
if (!navigator.getUserMedia) {
  navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
  navigator.mozGetUserMedia || navigator.msGetUserMedia;
}
  265. }(window));
  266. (function() {
/**
 * EventEmitter utility.
 * @constructor
 */
tracking.EventEmitter = function() {};

/**
 * Holds event listeners scoped by event type. Lazily replaced with an
 * instance-owned object map on the first `addListener` call.
 * @type {object}
 * @private
 */
tracking.EventEmitter.prototype.events_ = null;
  278. /**
  279. * Adds a listener to the end of the listeners array for the specified event.
  280. * @param {string} event
  281. * @param {function} listener
  282. * @return {object} Returns emitter, so calls can be chained.
  283. */
  284. tracking.EventEmitter.prototype.addListener = function(event, listener) {
  285. if (typeof listener !== 'function') {
  286. throw new TypeError('Listener must be a function');
  287. }
  288. if (!this.events_) {
  289. this.events_ = {};
  290. }
  291. this.emit('newListener', event, listener);
  292. if (!this.events_[event]) {
  293. this.events_[event] = [];
  294. }
  295. this.events_[event].push(listener);
  296. return this;
  297. };
  298. /**
  299. * Returns an array of listeners for the specified event.
  300. * @param {string} event
  301. * @return {array} Array of listeners.
  302. */
  303. tracking.EventEmitter.prototype.listeners = function(event) {
  304. return this.events_ && this.events_[event];
  305. };
  306. /**
  307. * Execute each of the listeners in order with the supplied arguments.
  308. * @param {string} event
  309. * @param {*} opt_args [arg1], [arg2], [...]
  310. * @return {boolean} Returns true if event had listeners, false otherwise.
  311. */
  312. tracking.EventEmitter.prototype.emit = function(event) {
  313. var listeners = this.listeners(event);
  314. if (listeners) {
  315. var args = Array.prototype.slice.call(arguments, 1);
  316. for (var i = 0; i < listeners.length; i++) {
  317. if (listeners[i]) {
  318. listeners[i].apply(this, args);
  319. }
  320. }
  321. return true;
  322. }
  323. return false;
  324. };
/**
 * Adds a listener to the end of the listeners array for the specified
 * event. Alias of `addListener`.
 * @param {string} event
 * @param {function} listener
 * @return {object} Returns emitter, so calls can be chained.
 */
tracking.EventEmitter.prototype.on = tracking.EventEmitter.prototype.addListener;
  332. /**
  333. * Adds a one time listener for the event. This listener is invoked only the
  334. * next time the event is fired, after which it is removed.
  335. * @param {string} event
  336. * @param {function} listener
  337. * @return {object} Returns emitter, so calls can be chained.
  338. */
  339. tracking.EventEmitter.prototype.once = function(event, listener) {
  340. var self = this;
  341. self.on(event, function handlerInternal() {
  342. self.removeListener(event, handlerInternal);
  343. listener.apply(this, arguments);
  344. });
  345. };
  346. /**
  347. * Removes all listeners, or those of the specified event. It's not a good
  348. * idea to remove listeners that were added elsewhere in the code,
  349. * especially when it's on an emitter that you didn't create.
  350. * @param {string} event
  351. * @return {object} Returns emitter, so calls can be chained.
  352. */
  353. tracking.EventEmitter.prototype.removeAllListeners = function(opt_event) {
  354. if (!this.events_) {
  355. return this;
  356. }
  357. if (opt_event) {
  358. delete this.events_[opt_event];
  359. } else {
  360. delete this.events_;
  361. }
  362. return this;
  363. };
  364. /**
  365. * Remove a listener from the listener array for the specified event.
  366. * Caution: changes array indices in the listener array behind the listener.
  367. * @param {string} event
  368. * @param {function} listener
  369. * @return {object} Returns emitter, so calls can be chained.
  370. */
  371. tracking.EventEmitter.prototype.removeListener = function(event, listener) {
  372. if (typeof listener !== 'function') {
  373. throw new TypeError('Listener must be a function');
  374. }
  375. if (!this.events_) {
  376. return this;
  377. }
  378. var listeners = this.listeners(event);
  379. if (Array.isArray(listeners)) {
  380. var i = listeners.indexOf(listener);
  381. if (i < 0) {
  382. return this;
  383. }
  384. listeners.splice(i, 1);
  385. }
  386. return this;
  387. };
/**
 * By default EventEmitters will print a warning if more than 10 listeners
 * are added for a particular event. This is a useful default which helps
 * finding memory leaks. Obviously not all Emitters should be limited to 10.
 * This function allows that to be increased. Set to zero for unlimited.
 * @param {number} n The maximum number of listeners.
 */
tracking.EventEmitter.prototype.setMaxListeners = function() {
  // Intentionally unimplemented: present only for Node EventEmitter API
  // parity; this emitter enforces no listener cap.
  throw new Error('Not implemented');
};
  398. }());
  399. (function() {
  400. /**
  401. * Canvas utility.
  402. * @static
  403. * @constructor
  404. */
  405. tracking.Canvas = {};
  406. /**
  407. * Loads an image source into the canvas.
  408. * @param {HTMLCanvasElement} canvas The canvas dom element.
  409. * @param {string} src The image source.
  410. * @param {number} x The canvas horizontal coordinate to load the image.
  411. * @param {number} y The canvas vertical coordinate to load the image.
  412. * @param {number} width The image width.
  413. * @param {number} height The image height.
  414. * @param {function} opt_callback Callback that fires when the image is loaded
  415. * into the canvas.
  416. * @static
  417. */
  418. tracking.Canvas.loadImage = function(canvas, src, x, y, width, height, opt_callback) {
  419. var instance = this;
  420. var img = new window.Image();
  421. img.crossOrigin = '*';
  422. img.onload = function() {
  423. var context = canvas.getContext('2d');
  424. canvas.width = width;
  425. canvas.height = height;
  426. context.drawImage(img, x, y, width, height);
  427. if (opt_callback) {
  428. opt_callback.call(instance);
  429. }
  430. img = null;
  431. };
  432. img.src = src;
  433. };
  434. }());
  435. (function() {
  436. /**
  437. * DisjointSet utility with path compression. Some applications involve
  438. * grouping n distinct objects into a collection of disjoint sets. Two
  439. * important operations are then finding which set a given object belongs to
  440. * and uniting the two sets. A disjoint set data structure maintains a
  441. * collection S={ S1 , S2 ,..., Sk } of disjoint dynamic sets. Each set is
  442. * identified by a representative, which usually is a member in the set.
  443. * @static
  444. * @constructor
  445. */
  446. tracking.DisjointSet = function(length) {
  447. if (length === undefined) {
  448. throw new Error('DisjointSet length not specified.');
  449. }
  450. this.length = length;
  451. this.parent = new Uint32Array(length);
  452. for (var i = 0; i < length; i++) {
  453. this.parent[i] = i;
  454. }
  455. };
  456. /**
  457. * Holds the length of the internal set.
  458. * @type {number}
  459. */
  460. tracking.DisjointSet.prototype.length = null;
  461. /**
  462. * Holds the set containing the representative values.
  463. * @type {Array.<number>}
  464. */
  465. tracking.DisjointSet.prototype.parent = null;
  466. /**
  467. * Finds a pointer to the representative of the set containing i.
  468. * @param {number} i
  469. * @return {number} The representative set of i.
  470. */
  471. tracking.DisjointSet.prototype.find = function(i) {
  472. if (this.parent[i] === i) {
  473. return i;
  474. } else {
  475. return (this.parent[i] = this.find(this.parent[i]));
  476. }
  477. };
  478. /**
  479. * Unites two dynamic sets containing objects i and j, say Si and Sj, into
  480. * a new set that Si ∪ Sj, assuming that Si ∩ Sj = ∅;
  481. * @param {number} i
  482. * @param {number} j
  483. */
  484. tracking.DisjointSet.prototype.union = function(i, j) {
  485. var iRepresentative = this.find(i);
  486. var jRepresentative = this.find(j);
  487. this.parent[iRepresentative] = jRepresentative;
  488. };
  489. }());
  490. (function() {
  491. /**
  492. * Image utility.
  493. * @static
  494. * @constructor
  495. */
  496. tracking.Image = {};
  497. /**
  498. * Computes gaussian blur. Adapted from
  499. * https://github.com/kig/canvasfilters.
  500. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  501. * @param {number} width The image width.
  502. * @param {number} height The image height.
  503. * @param {number} diameter Gaussian blur diameter, must be greater than 1.
  504. * @return {array} The edge pixels in a linear [r,g,b,a,...] array.
  505. */
  506. tracking.Image.blur = function(pixels, width, height, diameter) {
  507. diameter = Math.abs(diameter);
  508. if (diameter <= 1) {
  509. throw new Error('Diameter should be greater than 1.');
  510. }
  511. var radius = diameter / 2;
  512. var len = Math.ceil(diameter) + (1 - (Math.ceil(diameter) % 2));
  513. var weights = new Float32Array(len);
  514. var rho = (radius + 0.5) / 3;
  515. var rhoSq = rho * rho;
  516. var gaussianFactor = 1 / Math.sqrt(2 * Math.PI * rhoSq);
  517. var rhoFactor = -1 / (2 * rho * rho);
  518. var wsum = 0;
  519. var middle = Math.floor(len / 2);
  520. for (var i = 0; i < len; i++) {
  521. var x = i - middle;
  522. var gx = gaussianFactor * Math.exp(x * x * rhoFactor);
  523. weights[i] = gx;
  524. wsum += gx;
  525. }
  526. for (var j = 0; j < weights.length; j++) {
  527. weights[j] /= wsum;
  528. }
  529. return this.separableConvolve(pixels, width, height, weights, weights, false);
  530. };
/**
 * Computes the integral image for summed, squared, rotated and sobel pixels.
 * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop
 *     through.
 * @param {number} width The image width.
 * @param {number} height The image height.
 * @param {array} opt_integralImage Empty array of size `width * height` to
 *     be filled with the integral image values. If not specified compute sum
 *     values will be skipped.
 * @param {array} opt_integralImageSquare Empty array of size `width *
 *     height` to be filled with the integral image squared values. If not
 *     specified compute squared values will be skipped.
 * @param {array} opt_tiltedIntegralImage Empty array of size `width *
 *     height` to be filled with the rotated integral image values. If not
 *     specified compute sum values will be skipped.
 * @param {array} opt_integralImageSobel Empty array of size `width *
 *     height` to be filled with the integral image of sobel values. If not
 *     specified compute sobel filtering will be skipped.
 * @throws {Error} If no output array is given.
 * @static
 */
tracking.Image.computeIntegralImage = function(pixels, width, height, opt_integralImage, opt_integralImageSquare, opt_tiltedIntegralImage, opt_integralImageSobel) {
  if (arguments.length < 4) {
    throw new Error('You should specify at least one output array in the order: sum, square, tilted, sobel.');
  }
  // Sobel filtering runs over the whole image up front; the loop below
  // then accumulates its result like any other per-pixel value.
  var pixelsSobel;
  if (opt_integralImageSobel) {
    pixelsSobel = tracking.Image.sobel(pixels, width, height);
  }
  // Single raster-order pass; each requested table is accumulated
  // incrementally by the SAT/RSAT helpers, which rely on this row-major
  // traversal order.
  for (var i = 0; i < height; i++) {
    for (var j = 0; j < width; j++) {
      // Byte offset of pixel (j, i) in the RGBA array.
      var w = i * width * 4 + j * 4;
      // Grayscale (luma) conversion using the common 0.299/0.587/0.114
      // RGB weights, truncated to an integer via `~~`.
      var pixel = ~~(pixels[w] * 0.299 + pixels[w + 1] * 0.587 + pixels[w + 2] * 0.114);
      if (opt_integralImage) {
        this.computePixelValueSAT_(opt_integralImage, width, i, j, pixel);
      }
      if (opt_integralImageSquare) {
        this.computePixelValueSAT_(opt_integralImageSquare, width, i, j, pixel * pixel);
      }
      if (opt_tiltedIntegralImage) {
        // The rotated table also needs the gray value of the pixel one row
        // up. On the first row `w1` is negative, the reads yield
        // `undefined`, `~~NaN` gives 0, and `|| 0` keeps the fallback
        // explicit.
        var w1 = w - width * 4;
        var pixelAbove = ~~(pixels[w1] * 0.299 + pixels[w1 + 1] * 0.587 + pixels[w1 + 2] * 0.114);
        this.computePixelValueRSAT_(opt_tiltedIntegralImage, width, i, j, pixel, pixelAbove || 0);
      }
      if (opt_integralImageSobel) {
        this.computePixelValueSAT_(opt_integralImageSobel, width, i, j, pixelsSobel[w]);
      }
    }
  }
};
/**
 * Helper method to compute the rotated summed area table (RSAT) by the
 * formula:
 *
 * RSAT(x, y) = RSAT(x-1, y-1) + RSAT(x+1, y-1) - RSAT(x, y-2) + I(x, y) + I(x, y-1)
 *
 * @param {array} RSAT Array of size `width * height` being filled in
 *     row-major order; the value for position (j, i) is written at
 *     `i * width + j`.
 * @param {number} width The image width.
 * @param {number} i Vertical position of the pixel to be evaluated.
 * @param {number} j Horizontal position of the pixel to be evaluated.
 * @param {number} pixel Pixel value I(x, y) to be added to the integral
 *     image.
 * @param {number} pixelAbove Pixel value I(x, y-1) of the pixel one row
 *     above (0 on the first row).
 * @static
 * @private
 */
tracking.Image.computePixelValueRSAT_ = function(RSAT, width, i, j, pixel, pixelAbove) {
  var w = i * width + j;
  // Missing neighbors (above the first row) read as undefined and are
  // coerced to 0 by `|| 0`.
  // NOTE(review): at the row edges (j === 0 or j === width - 1) the
  // `w - width - 1` / `w - width + 1` indexes wrap into the adjacent
  // row's opposite end instead of falling outside the array — verify
  // whether this boundary behavior is intended.
  RSAT[w] = (RSAT[w - width - 1] || 0) + (RSAT[w - width + 1] || 0) - (RSAT[w - width - width] || 0) + pixel + pixelAbove;
};
  600. /**
  601. * Helper method to compute the summed area table (SAT) by the formula:
  602. *
  603. * SAT(x, y) = SAT(x, y-1) + SAT(x-1, y) + I(x, y) - SAT(x-1, y-1)
  604. *
  605. * @param {number} width The image width.
  606. * @param {array} SAT Empty array of size `width * height` to be filled with
  607. * the integral image values. If not specified compute sum values will be
  608. * skipped.
  609. * @param {number} i Vertical position of the pixel to be evaluated.
  610. * @param {number} j Horizontal position of the pixel to be evaluated.
  611. * @param {number} pixel Pixel value to be added to the integral image.
  612. * @static
  613. * @private
  614. */
  615. tracking.Image.computePixelValueSAT_ = function(SAT, width, i, j, pixel) {
  616. var w = i * width + j;
  617. SAT[w] = (SAT[w - width] || 0) + (SAT[w - 1] || 0) + pixel - (SAT[w - width - 1] || 0);
  618. };
  619. /**
  620. * Converts a color from a colorspace based on an RGB color model to a
  621. * grayscale representation of its luminance. The coefficients represent the
  622. * measured intensity perception of typical trichromat humans, in
  623. * particular, human vision is most sensitive to green and least sensitive
  624. * to blue.
  625. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  626. * @param {number} width The image width.
  627. * @param {number} height The image height.
  628. * @param {boolean} fillRGBA If the result should fill all RGBA values with the gray scale
  629. * values, instead of returning a single value per pixel.
  630. * @param {Uint8ClampedArray} The grayscale pixels in a linear array ([p,p,p,a,...] if fillRGBA
  631. * is true and [p1, p2, p3, ...] if fillRGBA is false).
  632. * @static
  633. */
  634. tracking.Image.grayscale = function(pixels, width, height, fillRGBA) {
  635. var gray = new Uint8ClampedArray(fillRGBA ? pixels.length : pixels.length >> 2);
  636. var p = 0;
  637. var w = 0;
  638. for (var i = 0; i < height; i++) {
  639. for (var j = 0; j < width; j++) {
  640. var value = pixels[w] * 0.299 + pixels[w + 1] * 0.587 + pixels[w + 2] * 0.114;
  641. gray[p++] = value;
  642. if (fillRGBA) {
  643. gray[p++] = value;
  644. gray[p++] = value;
  645. gray[p++] = pixels[w + 3];
  646. }
  647. w += 4;
  648. }
  649. }
  650. return gray;
  651. };
  652. /**
  653. * Fast horizontal separable convolution. A point spread function (PSF) is
  654. * said to be separable if it can be broken into two one-dimensional
  655. * signals: a vertical and a horizontal projection. The convolution is
  656. * performed by sliding the kernel over the image, generally starting at the
  657. * top left corner, so as to move the kernel through all the positions where
  658. * the kernel fits entirely within the boundaries of the image. Adapted from
  659. * https://github.com/kig/canvasfilters.
  660. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  661. * @param {number} width The image width.
  662. * @param {number} height The image height.
  663. * @param {array} weightsVector The weighting vector, e.g [-1,0,1].
  664. * @param {number} opaque
  665. * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array.
  666. */
  667. tracking.Image.horizontalConvolve = function(pixels, width, height, weightsVector, opaque) {
  668. var side = weightsVector.length;
  669. var halfSide = Math.floor(side / 2);
  670. var output = new Float32Array(width * height * 4);
  671. var alphaFac = opaque ? 1 : 0;
  672. for (var y = 0; y < height; y++) {
  673. for (var x = 0; x < width; x++) {
  674. var sy = y;
  675. var sx = x;
  676. var offset = (y * width + x) * 4;
  677. var r = 0;
  678. var g = 0;
  679. var b = 0;
  680. var a = 0;
  681. for (var cx = 0; cx < side; cx++) {
  682. var scy = sy;
  683. var scx = Math.min(width - 1, Math.max(0, sx + cx - halfSide));
  684. var poffset = (scy * width + scx) * 4;
  685. var wt = weightsVector[cx];
  686. r += pixels[poffset] * wt;
  687. g += pixels[poffset + 1] * wt;
  688. b += pixels[poffset + 2] * wt;
  689. a += pixels[poffset + 3] * wt;
  690. }
  691. output[offset] = r;
  692. output[offset + 1] = g;
  693. output[offset + 2] = b;
  694. output[offset + 3] = a + alphaFac * (255 - a);
  695. }
  696. }
  697. return output;
  698. };
  699. /**
  700. * Fast vertical separable convolution. A point spread function (PSF) is
  701. * said to be separable if it can be broken into two one-dimensional
  702. * signals: a vertical and a horizontal projection. The convolution is
  703. * performed by sliding the kernel over the image, generally starting at the
  704. * top left corner, so as to move the kernel through all the positions where
  705. * the kernel fits entirely within the boundaries of the image. Adapted from
  706. * https://github.com/kig/canvasfilters.
  707. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  708. * @param {number} width The image width.
  709. * @param {number} height The image height.
  710. * @param {array} weightsVector The weighting vector, e.g [-1,0,1].
  711. * @param {number} opaque
  712. * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array.
  713. */
  714. tracking.Image.verticalConvolve = function(pixels, width, height, weightsVector, opaque) {
  715. var side = weightsVector.length;
  716. var halfSide = Math.floor(side / 2);
  717. var output = new Float32Array(width * height * 4);
  718. var alphaFac = opaque ? 1 : 0;
  719. for (var y = 0; y < height; y++) {
  720. for (var x = 0; x < width; x++) {
  721. var sy = y;
  722. var sx = x;
  723. var offset = (y * width + x) * 4;
  724. var r = 0;
  725. var g = 0;
  726. var b = 0;
  727. var a = 0;
  728. for (var cy = 0; cy < side; cy++) {
  729. var scy = Math.min(height - 1, Math.max(0, sy + cy - halfSide));
  730. var scx = sx;
  731. var poffset = (scy * width + scx) * 4;
  732. var wt = weightsVector[cy];
  733. r += pixels[poffset] * wt;
  734. g += pixels[poffset + 1] * wt;
  735. b += pixels[poffset + 2] * wt;
  736. a += pixels[poffset + 3] * wt;
  737. }
  738. output[offset] = r;
  739. output[offset + 1] = g;
  740. output[offset + 2] = b;
  741. output[offset + 3] = a + alphaFac * (255 - a);
  742. }
  743. }
  744. return output;
  745. };
  746. /**
  747. * Fast separable convolution. A point spread function (PSF) is said to be
  748. * separable if it can be broken into two one-dimensional signals: a
  749. * vertical and a horizontal projection. The convolution is performed by
  750. * sliding the kernel over the image, generally starting at the top left
  751. * corner, so as to move the kernel through all the positions where the
  752. * kernel fits entirely within the boundaries of the image. Adapted from
  753. * https://github.com/kig/canvasfilters.
  754. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  755. * @param {number} width The image width.
  756. * @param {number} height The image height.
  757. * @param {array} horizWeights The horizontal weighting vector, e.g [-1,0,1].
  758. * @param {array} vertWeights The vertical vector, e.g [-1,0,1].
  759. * @param {number} opaque
  760. * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array.
  761. */
  762. tracking.Image.separableConvolve = function(pixels, width, height, horizWeights, vertWeights, opaque) {
  763. var vertical = this.verticalConvolve(pixels, width, height, vertWeights, opaque);
  764. return this.horizontalConvolve(vertical, width, height, horizWeights, opaque);
  765. };
  766. /**
  767. * Compute image edges using Sobel operator. Computes the vertical and
  768. * horizontal gradients of the image and combines the computed images to
  769. * find edges in the image. The way we implement the Sobel filter here is by
  770. * first grayscaling the image, then taking the horizontal and vertical
  771. * gradients and finally combining the gradient images to make up the final
  772. * image. Adapted from https://github.com/kig/canvasfilters.
  773. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  774. * @param {number} width The image width.
  775. * @param {number} height The image height.
  776. * @return {array} The edge pixels in a linear [r,g,b,a,...] array.
  777. */
  778. tracking.Image.sobel = function(pixels, width, height) {
  779. pixels = this.grayscale(pixels, width, height, true);
  780. var output = new Float32Array(width * height * 4);
  781. var sobelSignVector = new Float32Array([-1, 0, 1]);
  782. var sobelScaleVector = new Float32Array([1, 2, 1]);
  783. var vertical = this.separableConvolve(pixels, width, height, sobelSignVector, sobelScaleVector);
  784. var horizontal = this.separableConvolve(pixels, width, height, sobelScaleVector, sobelSignVector);
  785. for (var i = 0; i < output.length; i += 4) {
  786. var v = vertical[i];
  787. var h = horizontal[i];
  788. var p = Math.sqrt(h * h + v * v);
  789. output[i] = p;
  790. output[i + 1] = p;
  791. output[i + 2] = p;
  792. output[i + 3] = 255;
  793. }
  794. return output;
  795. };
  796. /**
  797. * Equalizes the histogram of a grayscale image, normalizing the
  798. * brightness and increasing the contrast of the image.
  799. * @param {pixels} pixels The grayscale pixels in a linear array.
  800. * @param {number} width The image width.
  801. * @param {number} height The image height.
  802. * @return {array} The equalized grayscale pixels in a linear array.
  803. */
  804. tracking.Image.equalizeHist = function(pixels, width, height){
  805. var equalized = new Uint8ClampedArray(pixels.length);
  806. var histogram = new Array(256);
  807. for(var i=0; i < 256; i++) histogram[i] = 0;
  808. for(var i=0; i < pixels.length; i++){
  809. equalized[i] = pixels[i];
  810. histogram[pixels[i]]++;
  811. }
  812. var prev = histogram[0];
  813. for(var i=0; i < 256; i++){
  814. histogram[i] += prev;
  815. prev = histogram[i];
  816. }
  817. var norm = 255 / pixels.length;
  818. for(var i=0; i < pixels.length; i++)
  819. equalized[i] = (histogram[pixels[i]] * norm + 0.5) | 0;
  820. return equalized;
  821. }
  822. }());
(function() {
  /**
   * ViolaJones utility namespace: frontal object (e.g. face) detection
   * through HAAR cascade classifiers.
   * @static
   * @constructor
   */
  tracking.ViolaJones = {};
  /**
   * Holds the minimum area of intersection that defines when a rectangle is
   * from the same group. Often when an object is matched, multiple
   * rectangles are classified as possible matches; when they intersect by
   * at least this fraction they are merged into a single detection.
   * @type {number}
   * @default 0.5
   * @static
   */
  tracking.ViolaJones.REGIONS_OVERLAP = 0.5;
  /**
   * Holds the HAAR cascade classifiers converted from OpenCV training,
   * keyed by classifier name (populated elsewhere).
   * @type {array}
   * @static
   */
  tracking.ViolaJones.classifiers = {};
  846. /**
  847. * Detects through the HAAR cascade data rectangles matches.
  848. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  849. * @param {number} width The image width.
  850. * @param {number} height The image height.
  851. * @param {number} initialScale The initial scale to start the block
  852. * scaling.
  853. * @param {number} scaleFactor The scale factor to scale the feature block.
  854. * @param {number} stepSize The block step size.
  855. * @param {number} edgesDensity Percentage density edges inside the
  856. * classifier block. Value from [0.0, 1.0], defaults to 0.2. If specified
  857. * edge detection will be applied to the image to prune dead areas of the
  858. * image, this can improve significantly performance.
  859. * @param {number} data The HAAR cascade data.
  860. * @return {array} Found rectangles.
  861. * @static
  862. */
  863. tracking.ViolaJones.detect = function(pixels, width, height, initialScale, scaleFactor, stepSize, edgesDensity, data) {
  864. var total = 0;
  865. var rects = [];
  866. var integralImage = new Int32Array(width * height);
  867. var integralImageSquare = new Int32Array(width * height);
  868. var tiltedIntegralImage = new Int32Array(width * height);
  869. var integralImageSobel;
  870. if (edgesDensity > 0) {
  871. integralImageSobel = new Int32Array(width * height);
  872. }
  873. tracking.Image.computeIntegralImage(pixels, width, height, integralImage, integralImageSquare, tiltedIntegralImage, integralImageSobel);
  874. var minWidth = data[0];
  875. var minHeight = data[1];
  876. var scale = initialScale * scaleFactor;
  877. var blockWidth = (scale * minWidth) | 0;
  878. var blockHeight = (scale * minHeight) | 0;
  879. while (blockWidth < width && blockHeight < height) {
  880. var step = (scale * stepSize + 0.5) | 0;
  881. for (var i = 0; i < (height - blockHeight); i += step) {
  882. for (var j = 0; j < (width - blockWidth); j += step) {
  883. if (edgesDensity > 0) {
  884. if (this.isTriviallyExcluded(edgesDensity, integralImageSobel, i, j, width, blockWidth, blockHeight)) {
  885. continue;
  886. }
  887. }
  888. if (this.evalStages_(data, integralImage, integralImageSquare, tiltedIntegralImage, i, j, width, blockWidth, blockHeight, scale)) {
  889. rects[total++] = {
  890. width: blockWidth,
  891. height: blockHeight,
  892. x: j,
  893. y: i
  894. };
  895. }
  896. }
  897. }
  898. scale *= scaleFactor;
  899. blockWidth = (scale * minWidth) | 0;
  900. blockHeight = (scale * minHeight) | 0;
  901. }
  902. return this.mergeRectangles_(rects);
  903. };
  904. /**
  905. * Fast check to test whether the edges density inside the block is greater
  906. * than a threshold, if true it tests the stages. This can improve
  907. * significantly performance.
  908. * @param {number} edgesDensity Percentage density edges inside the
  909. * classifier block.
  910. * @param {array} integralImageSobel The integral image of a sobel image.
  911. * @param {number} i Vertical position of the pixel to be evaluated.
  912. * @param {number} j Horizontal position of the pixel to be evaluated.
  913. * @param {number} width The image width.
  914. * @return {boolean} True whether the block at position i,j can be skipped,
  915. * false otherwise.
  916. * @static
  917. * @protected
  918. */
  919. tracking.ViolaJones.isTriviallyExcluded = function(edgesDensity, integralImageSobel, i, j, width, blockWidth, blockHeight) {
  920. var wbA = i * width + j;
  921. var wbB = wbA + blockWidth;
  922. var wbD = wbA + blockHeight * width;
  923. var wbC = wbD + blockWidth;
  924. var blockEdgesDensity = (integralImageSobel[wbA] - integralImageSobel[wbB] - integralImageSobel[wbD] + integralImageSobel[wbC]) / (blockWidth * blockHeight * 255);
  925. if (blockEdgesDensity < edgesDensity) {
  926. return true;
  927. }
  928. return false;
  929. };
/**
 * Evaluates whether the block at position i,j passes every stage of the
 * HAAR cascade. The cascade `data` is a flat array walked with a moving
 * cursor `w`: entries 0-1 are the base block size, then each stage is
 * [stageThreshold, nodeCount, node...] and each node is
 * [tilted, rectCount, (left, top, width, height, weight) * rectCount,
 * nodeThreshold, nodeLeft, nodeRight].
 * @param {number} data The HAAR cascade data.
 * @param {array} integralImage Summed area table of the image.
 * @param {array} integralImageSquare Summed area table of squared pixels
 *    (used for the variance normalization).
 * @param {array} tiltedIntegralImage Rotated summed area table, used for
 *    tilted features.
 * @param {number} i Vertical position of the block to be evaluated.
 * @param {number} j Horizontal position of the block to be evaluated.
 * @param {number} width The image width.
 * @param {number} blockWidth The block width.
 * @param {number} blockHeight The block height.
 * @param {number} scale The scale factor of the block size relative to its
 *    original size.
 * @return {boolean} Whether the region passes all the stage tests.
 * @private
 * @static
 */
tracking.ViolaJones.evalStages_ = function(data, integralImage, integralImageSquare, tiltedIntegralImage, i, j, width, blockWidth, blockHeight, scale) {
  var inverseArea = 1.0 / (blockWidth * blockHeight);
  // Corner indexes of the block in the (row-major) integral images.
  var wbA = i * width + j;
  var wbB = wbA + blockWidth;
  var wbD = wbA + blockHeight * width;
  var wbC = wbD + blockWidth;
  // Mean and variance of the block, for lighting normalization.
  var mean = (integralImage[wbA] - integralImage[wbB] - integralImage[wbD] + integralImage[wbC]) * inverseArea;
  var variance = (integralImageSquare[wbA] - integralImageSquare[wbB] - integralImageSquare[wbD] + integralImageSquare[wbC]) * inverseArea - mean * mean;
  var standardDeviation = 1;
  if (variance > 0) {
    standardDeviation = Math.sqrt(variance);
  }
  var length = data.length;
  // w is the cursor into the flat cascade data; every data[w++] below
  // consumes the next field, so the read order must not change.
  for (var w = 2; w < length; ) {
    var stageSum = 0;
    var stageThreshold = data[w++];
    var nodeLength = data[w++];
    while (nodeLength--) {
      var rectsSum = 0;
      var tilted = data[w++];
      var rectsLength = data[w++];
      for (var r = 0; r < rectsLength; r++) {
        // Feature rectangle scaled to the current block size, rounded.
        var rectLeft = (j + data[w++] * scale + 0.5) | 0;
        var rectTop = (i + data[w++] * scale + 0.5) | 0;
        var rectWidth = (data[w++] * scale + 0.5) | 0;
        var rectHeight = (data[w++] * scale + 0.5) | 0;
        var rectWeight = data[w++];
        var w1;
        var w2;
        var w3;
        var w4;
        if (tilted) {
          // RectSum(r) = RSAT(x-h+w, y+w+h-1) + RSAT(x, y-1) - RSAT(x-h, y+h-1) - RSAT(x+w, y+w-1)
          w1 = (rectLeft - rectHeight + rectWidth) + (rectTop + rectWidth + rectHeight - 1) * width;
          w2 = rectLeft + (rectTop - 1) * width;
          w3 = (rectLeft - rectHeight) + (rectTop + rectHeight - 1) * width;
          w4 = (rectLeft + rectWidth) + (rectTop + rectWidth - 1) * width;
          rectsSum += (tiltedIntegralImage[w1] + tiltedIntegralImage[w2] - tiltedIntegralImage[w3] - tiltedIntegralImage[w4]) * rectWeight;
        } else {
          // RectSum(r) = SAT(x-1, y-1) + SAT(x+w-1, y+h-1) - SAT(x-1, y+h-1) - SAT(x+w-1, y-1)
          w1 = rectTop * width + rectLeft;
          w2 = w1 + rectWidth;
          w3 = w1 + rectHeight * width;
          w4 = w3 + rectWidth;
          rectsSum += (integralImage[w1] - integralImage[w2] - integralImage[w3] + integralImage[w4]) * rectWeight;
          // TODO: Review the code below to analyze performance when using it instead.
          // w1 = (rectLeft - 1) + (rectTop - 1) * width;
          // w2 = (rectLeft + rectWidth - 1) + (rectTop + rectHeight - 1) * width;
          // w3 = (rectLeft - 1) + (rectTop + rectHeight - 1) * width;
          // w4 = (rectLeft + rectWidth - 1) + (rectTop - 1) * width;
          // rectsSum += (integralImage[w1] + integralImage[w2] - integralImage[w3] - integralImage[w4]) * rectWeight;
        }
      }
      var nodeThreshold = data[w++];
      var nodeLeft = data[w++];
      var nodeRight = data[w++];
      // Weak classifier decision: compare the normalized feature value
      // against the node threshold scaled by the block deviation.
      if (rectsSum * inverseArea < nodeThreshold * standardDeviation) {
        stageSum += nodeLeft;
      } else {
        stageSum += nodeRight;
      }
    }
    // Reject as soon as any stage fails (cascade early-out).
    if (stageSum < stageThreshold) {
      return false;
    }
  }
  return true;
};
/**
 * Postprocesses the detected sub-windows in order to combine overlapping
 * detections into a single detection: rectangles whose mutual overlap
 * exceeds REGIONS_OVERLAP are grouped with a disjoint set, then each
 * group is averaged into one rectangle carrying a `total` count.
 * @param {array} rects Candidate rectangles ({x, y, width, height}).
 * @return {array} Merged rectangles ({total, x, y, width, height}).
 * @private
 * @static
 */
tracking.ViolaJones.mergeRectangles_ = function(rects) {
  var disjointSet = new tracking.DisjointSet(rects.length);
  // Union every pair of rectangles that overlap enough.
  for (var i = 0; i < rects.length; i++) {
    var r1 = rects[i];
    for (var j = 0; j < rects.length; j++) {
      var r2 = rects[j];
      if (tracking.Math.intersectRect(r1.x, r1.y, r1.x + r1.width, r1.y + r1.height, r2.x, r2.y, r2.x + r2.width, r2.y + r2.height)) {
        // Intersection rectangle; (x1 - x2) * (y1 - y2) is positive
        // because x1 <= x2 and y1 <= y2 for intersecting rects.
        var x1 = Math.max(r1.x, r2.x);
        var y1 = Math.max(r1.y, r2.y);
        var x2 = Math.min(r1.x + r1.width, r2.x + r2.width);
        var y2 = Math.min(r1.y + r1.height, r2.y + r2.height);
        var overlap = (x1 - x2) * (y1 - y2);
        var area1 = (r1.width * r1.height);
        var area2 = (r2.width * r2.height);
        // NOTE(review): both denominators scale by (area1 / area2); a
        // plain overlap/area1 and overlap/area2 ratio may have been
        // intended — confirm before changing, as it alters detections.
        if ((overlap / (area1 * (area1 / area2)) >= this.REGIONS_OVERLAP) &&
            (overlap / (area2 * (area1 / area2)) >= this.REGIONS_OVERLAP)) {
          disjointSet.union(i, j);
        }
      }
    }
  }
  // Accumulate each group's rectangles under its representative, then
  // average them into the final merged rectangle.
  var map = {};
  for (var k = 0; k < disjointSet.length; k++) {
    var rep = disjointSet.find(k);
    if (!map[rep]) {
      map[rep] = {
        total: 1,
        width: rects[k].width,
        height: rects[k].height,
        x: rects[k].x,
        y: rects[k].y
      };
      continue;
    }
    map[rep].total++;
    map[rep].width += rects[k].width;
    map[rep].height += rects[k].height;
    map[rep].x += rects[k].x;
    map[rep].y += rects[k].y;
  }
  var result = [];
  Object.keys(map).forEach(function(key) {
    var rect = map[key];
    result.push({
      total: rect.total,
      // Average of the grouped rectangles, rounded to nearest integer.
      width: (rect.width / rect.total + 0.5) | 0,
      height: (rect.height / rect.total + 0.5) | 0,
      x: (rect.x / rect.total + 0.5) | 0,
      y: (rect.y / rect.total + 0.5) | 0
    });
  });
  return result;
};
  1074. }());
(function() {
  /**
   * Brief stands for "Binary Robust Independent Elementary Features".
   * This method generates a binary string descriptor for each keypoint
   * found by an extractor method.
   * @static
   * @constructor
   */
  tracking.Brief = {};
  /**
   * The number of binary tests, i.e. the number of (x,y)-location pairs
   * uniquely chosen during initialization. Values could vary between
   * N = 128, 256, 512; N = 128 yields a good compromise between speed,
   * storage efficiency, and recognition rate.
   * @type {number}
   * @default 512
   */
  tracking.Brief.N = 512;
  /**
   * Caches the per-image-width linearized offsets derived from the
   * (x,y)-location pairs chosen during initialization, keyed by width.
   * @type {Object.<number, Int32Array>}
   * @private
   * @static
   */
  tracking.Brief.randomImageOffsets_ = {};
  /**
   * Caches the raw (x,y)-location pairs uniquely chosen during
   * initialization (4 values per test: x1, y1, x2, y2). Lazily created.
   * @type {Int32Array}
   * @private
   * @static
   */
  tracking.Brief.randomWindowOffsets_ = null;
/**
 * Generates a binary string for each of the found keypoints extracted
 * using an extractor method: for every keypoint, N pairs of pixels around
 * it are compared and each comparison contributes one bit, packed into
 * 32-bit words.
 * @param {array} pixels The grayscale pixels in a linear [p1,p2,...] array.
 * @param {number} width The image width.
 * @param {array} keypoints Keypoint coordinates as [x0,y0,x1,y1,...].
 * @return {Int32Array} Returns an array where each sequence of N/32 int
 *    values represents the descriptor binary string of one keypoint,
 *    e.g. [0,0,0,0, 0,0,0,0, ...] for N = 128.
 * @static
 */
tracking.Brief.getDescriptors = function(pixels, width, keypoints) {
  // Optimizing divide by 32 operation using binary shift
  // (this.N >> 5) === this.N/32.
  var descriptors = new Int32Array((keypoints.length >> 1) * (this.N >> 5));
  var descriptorWord = 0;
  var offsets = this.getRandomOffsets_(width);
  var position = 0;
  for (var i = 0; i < keypoints.length; i += 2) {
    // Linear index of the keypoint (x = keypoints[i], y = keypoints[i+1]).
    var w = width * keypoints[i + 1] + keypoints[i];
    var offsetsPosition = 0;
    for (var j = 0, n = this.N; j < n; j++) {
      // The two offsets of test j are consumed in order: JavaScript
      // guarantees left-to-right operand evaluation, so the left operand
      // uses offsets[2*j] and the right one offsets[2*j + 1].
      if (pixels[offsets[offsetsPosition++] + w] < pixels[offsets[offsetsPosition++] + w]) {
        // The bit in the position `j % 32` of descriptorWord should be set to 1. We do
        // this by making an OR operation with a binary number that only has the bit
        // in that position set to 1. That binary number is obtained by shifting 1 left by
        // `j % 32` (which is the same as `j & 31` left) positions.
        descriptorWord |= 1 << (j & 31);
      }
      // If the next j is a multiple of 32, we will need to use a new descriptor word to hold
      // the next results.
      if (!((j + 1) & 31)) {
        descriptors[position++] = descriptorWord;
        descriptorWord = 0;
      }
    }
  }
  return descriptors;
};
  1147. /**
  1148. * Matches sets of features {mi} and {m′j} extracted from two images taken
  1149. * from similar, and often successive, viewpoints. A classical procedure
  1150. * runs as follows. For each point {mi} in the first image, search in a
  1151. * region of the second image around location {mi} for point {m′j}. The
  1152. * search is based on the similarity of the local image windows, also known
  1153. * as kernel windows, centered on the points, which strongly characterizes
  1154. * the points when the images are sufficiently close. Once each keypoint is
  1155. * described with its binary string, they need to be compared with the
  1156. * closest matching point. Distance metric is critical to the performance of
  1157. * in- trusion detection systems. Thus using binary strings reduces the size
  1158. * of the descriptor and provides an interesting data structure that is fast
  1159. * to operate whose similarity can be measured by the Hamming distance.
  1160. * @param {array} keypoints1
  1161. * @param {array} descriptors1
  1162. * @param {array} keypoints2
  1163. * @param {array} descriptors2
  1164. * @return {Int32Array} Returns an array where the index is the corner1
  1165. * index coordinate, and the value is the corresponding match index of
  1166. * corner2, e.g. keypoints1=[x0,y0,x1,y1,...] and
  1167. * keypoints2=[x'0,y'0,x'1,y'1,...], if x0 matches x'1 and x1 matches x'0,
  1168. * the return array would be [3,0].
  1169. * @static
  1170. */
  1171. tracking.Brief.match = function(keypoints1, descriptors1, keypoints2, descriptors2) {
  1172. var len1 = keypoints1.length >> 1;
  1173. var len2 = keypoints2.length >> 1;
  1174. var matches = new Array(len1);
  1175. for (var i = 0; i < len1; i++) {
  1176. var min = Infinity;
  1177. var minj = 0;
  1178. for (var j = 0; j < len2; j++) {
  1179. var dist = 0;
  1180. // Optimizing divide by 32 operation using binary shift
  1181. // (this.N >> 5) === this.N/32.
  1182. for (var k = 0, n = this.N >> 5; k < n; k++) {
  1183. dist += tracking.Math.hammingWeight(descriptors1[i * n + k] ^ descriptors2[j * n + k]);
  1184. }
  1185. if (dist < min) {
  1186. min = dist;
  1187. minj = j;
  1188. }
  1189. }
  1190. matches[i] = {
  1191. index1: i,
  1192. index2: minj,
  1193. keypoint1: [keypoints1[2 * i], keypoints1[2 * i + 1]],
  1194. keypoint2: [keypoints2[2 * minj], keypoints2[2 * minj + 1]],
  1195. confidence: 1 - min / this.N
  1196. };
  1197. }
  1198. return matches;
  1199. };
  1200. /**
  1201. * Removes matches outliers by testing matches on both directions.
  1202. * @param {array} keypoints1
  1203. * @param {array} descriptors1
  1204. * @param {array} keypoints2
  1205. * @param {array} descriptors2
  1206. * @return {Int32Array} Returns an array where the index is the corner1
  1207. * index coordinate, and the value is the corresponding match index of
  1208. * corner2, e.g. keypoints1=[x0,y0,x1,y1,...] and
  1209. * keypoints2=[x'0,y'0,x'1,y'1,...], if x0 matches x'1 and x1 matches x'0,
  1210. * the return array would be [3,0].
  1211. * @static
  1212. */
  1213. tracking.Brief.reciprocalMatch = function(keypoints1, descriptors1, keypoints2, descriptors2) {
  1214. var matches = [];
  1215. if (keypoints1.length === 0 || keypoints2.length === 0) {
  1216. return matches;
  1217. }
  1218. var matches1 = tracking.Brief.match(keypoints1, descriptors1, keypoints2, descriptors2);
  1219. var matches2 = tracking.Brief.match(keypoints2, descriptors2, keypoints1, descriptors1);
  1220. for (var i = 0; i < matches1.length; i++) {
  1221. if (matches2[matches1[i].index2].index2 === i) {
  1222. matches.push(matches1[i]);
  1223. }
  1224. }
  1225. return matches;
  1226. };
  1227. /**
  1228. * Gets the coordinates values of (x,y)-location pairs uniquely chosen
  1229. * during the initialization.
  1230. * @return {array} Array with the random offset values.
  1231. * @private
  1232. */
  1233. tracking.Brief.getRandomOffsets_ = function(width) {
  1234. if (!this.randomWindowOffsets_) {
  1235. var windowPosition = 0;
  1236. var windowOffsets = new Int32Array(4 * this.N);
  1237. for (var i = 0; i < this.N; i++) {
  1238. windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
  1239. windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
  1240. windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
  1241. windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
  1242. }
  1243. this.randomWindowOffsets_ = windowOffsets;
  1244. }
  1245. if (!this.randomImageOffsets_[width]) {
  1246. var imagePosition = 0;
  1247. var imageOffsets = new Int32Array(2 * this.N);
  1248. for (var j = 0; j < this.N; j++) {
  1249. imageOffsets[imagePosition++] = this.randomWindowOffsets_[4 * j] * width + this.randomWindowOffsets_[4 * j + 1];
  1250. imageOffsets[imagePosition++] = this.randomWindowOffsets_[4 * j + 2] * width + this.randomWindowOffsets_[4 * j + 3];
  1251. }
  1252. this.randomImageOffsets_[width] = imageOffsets;
  1253. }
  1254. return this.randomImageOffsets_[width];
  1255. };
  1256. }());
  1257. (function() {
  1258. /**
  1259. * FAST intends for "Features from Accelerated Segment Test". This method
  1260. * performs a point segment test corner detection. The segment test
  1261. * criterion operates by considering a circle of sixteen pixels around the
  1262. * corner candidate p. The detector classifies p as a corner if there exists
* a set of n contiguous pixels in the circle which are all brighter than the
  1264. * intensity of the candidate pixel Ip plus a threshold t, or all darker
  1265. * than Ip − t.
  1266. *
  1267. * 15 00 01
  1268. * 14 02
  1269. * 13 03
  1270. * 12 [] 04
  1271. * 11 05
  1272. * 10 06
  1273. * 09 08 07
  1274. *
  1275. * For more reference:
  1276. * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.60.3991&rep=rep1&type=pdf
  1277. * @static
  1278. * @constructor
  1279. */
tracking.Fast = {};
/**
 * Holds the default threshold to determine whether the tested pixel is
 * brighter or darker than the corner candidate p. Can be overridden per
 * call via the `opt_threshold` argument of `findCorners`.
 * @type {number}
 * @default 40
 * @static
 */
tracking.Fast.THRESHOLD = 40;
/**
 * Caches the linearized offsets of the circle surrounding the pixel
 * candidate p, keyed by image width (the offsets depend on the row stride).
 * @type {Object.<number, Int32Array>}
 * @private
 * @static
 */
tracking.Fast.circles_ = {};
  1296. /**
  1297. * Finds corners coordinates on the graysacaled image.
  1298. * @param {array} The grayscale pixels in a linear [p1,p2,...] array.
  1299. * @param {number} width The image width.
  1300. * @param {number} height The image height.
  1301. * @param {number} threshold to determine whether the tested pixel is brighter or
  1302. * darker than the corner candidate p. Default value is 40.
  1303. * @return {array} Array containing the coordinates of all found corners,
  1304. * e.g. [x0,y0,x1,y1,...], where P(x0,y0) represents a corner coordinate.
  1305. * @static
  1306. */
  1307. tracking.Fast.findCorners = function(pixels, width, height, opt_threshold) {
  1308. var circleOffsets = this.getCircleOffsets_(width);
  1309. var circlePixels = new Int32Array(16);
  1310. var corners = [];
  1311. if (opt_threshold === undefined) {
  1312. opt_threshold = this.THRESHOLD;
  1313. }
  1314. // When looping through the image pixels, skips the first three lines from
  1315. // the image boundaries to constrain the surrounding circle inside the image
  1316. // area.
  1317. for (var i = 3; i < height - 3; i++) {
  1318. for (var j = 3; j < width - 3; j++) {
  1319. var w = i * width + j;
  1320. var p = pixels[w];
  1321. // Loops the circle offsets to read the pixel value for the sixteen
  1322. // surrounding pixels.
  1323. for (var k = 0; k < 16; k++) {
  1324. circlePixels[k] = pixels[w + circleOffsets[k]];
  1325. }
  1326. if (this.isCorner(p, circlePixels, opt_threshold)) {
  1327. // The pixel p is classified as a corner, as optimization increment j
  1328. // by the circle radius 3 to skip the neighbor pixels inside the
  1329. // surrounding circle. This can be removed without compromising the
  1330. // result.
  1331. corners.push(j, i);
  1332. j += 3;
  1333. }
  1334. }
  1335. }
  1336. return corners;
  1337. };
  1338. /**
  1339. * Checks if the circle pixel is brighter than the candidate pixel p by
  1340. * a threshold.
  1341. * @param {number} circlePixel The circle pixel value.
  1342. * @param {number} p The value of the candidate pixel p.
  1343. * @param {number} threshold
  1344. * @return {Boolean}
  1345. * @static
  1346. */
  1347. tracking.Fast.isBrighter = function(circlePixel, p, threshold) {
  1348. return circlePixel - p > threshold;
  1349. };
  1350. /**
  1351. * Checks if the circle pixel is within the corner of the candidate pixel p
  1352. * by a threshold.
  1353. * @param {number} p The value of the candidate pixel p.
  1354. * @param {number} circlePixel The circle pixel value.
  1355. * @param {number} threshold
  1356. * @return {Boolean}
  1357. * @static
  1358. */
  1359. tracking.Fast.isCorner = function(p, circlePixels, threshold) {
  1360. if (this.isTriviallyExcluded(circlePixels, p, threshold)) {
  1361. return false;
  1362. }
  1363. for (var x = 0; x < 16; x++) {
  1364. var darker = true;
  1365. var brighter = true;
  1366. for (var y = 0; y < 9; y++) {
  1367. var circlePixel = circlePixels[(x + y) & 15];
  1368. if (!this.isBrighter(p, circlePixel, threshold)) {
  1369. brighter = false;
  1370. if (darker === false) {
  1371. break;
  1372. }
  1373. }
  1374. if (!this.isDarker(p, circlePixel, threshold)) {
  1375. darker = false;
  1376. if (brighter === false) {
  1377. break;
  1378. }
  1379. }
  1380. }
  1381. if (brighter || darker) {
  1382. return true;
  1383. }
  1384. }
  1385. return false;
  1386. };
  1387. /**
  1388. * Checks if the circle pixel is darker than the candidate pixel p by
  1389. * a threshold.
  1390. * @param {number} circlePixel The circle pixel value.
  1391. * @param {number} p The value of the candidate pixel p.
  1392. * @param {number} threshold
  1393. * @return {Boolean}
  1394. * @static
  1395. */
  1396. tracking.Fast.isDarker = function(circlePixel, p, threshold) {
  1397. return p - circlePixel > threshold;
  1398. };
/**
 * Fast check to test if the candidate pixel is a trivially excluded value.
 * In order to be a corner, the candidate pixel value should be darker or
 * brighter than 9-12 surrounding pixels; when fewer than three of the top,
 * bottom, left and right circle pixels are brighter (or darker) the
 * candidate can be excluded immediately, improving performance.
 * @param {Int32Array} circlePixels The sixteen circle pixel values.
 * @param {number} p The value of the candidate pixel p.
 * @param {number} threshold
 * @return {Boolean} True when p can be rejected without the full segment test.
 * @static
 * @protected
 */
tracking.Fast.isTriviallyExcluded = function(circlePixels, p, threshold) {
var count = 0;
// The four compass points of the surrounding circle (see the diagram in the
// header comment): index 0 = top, 4 = right, 8 = bottom, 12 = left.
var circleBottom = circlePixels[8];
var circleLeft = circlePixels[12];
var circleRight = circlePixels[4];
var circleTop = circlePixels[0];
if (this.isBrighter(circleTop, p, threshold)) {
count++;
}
if (this.isBrighter(circleRight, p, threshold)) {
count++;
}
if (this.isBrighter(circleBottom, p, threshold)) {
count++;
}
if (this.isBrighter(circleLeft, p, threshold)) {
count++;
}
// Fewer than three brighter compass pixels: re-check for the darker case.
if (count < 3) {
count = 0;
if (this.isDarker(circleTop, p, threshold)) {
count++;
}
if (this.isDarker(circleRight, p, threshold)) {
count++;
}
if (this.isDarker(circleBottom, p, threshold)) {
count++;
}
if (this.isDarker(circleLeft, p, threshold)) {
count++;
}
// Neither direction has three compass pixels: cannot be a corner.
if (count < 3) {
return true;
}
}
return false;
};
  1450. /**
  1451. * Gets the sixteen offset values of the circle surrounding pixel.
  1452. * @param {number} width The image width.
  1453. * @return {array} Array with the sixteen offset values of the circle
  1454. * surrounding pixel.
  1455. * @private
  1456. */
  1457. tracking.Fast.getCircleOffsets_ = function(width) {
  1458. if (this.circles_[width]) {
  1459. return this.circles_[width];
  1460. }
  1461. var circle = new Int32Array(16);
  1462. circle[0] = -width - width - width;
  1463. circle[1] = circle[0] + 1;
  1464. circle[2] = circle[1] + width + 1;
  1465. circle[3] = circle[2] + width + 1;
  1466. circle[4] = circle[3] + width;
  1467. circle[5] = circle[4] + width;
  1468. circle[6] = circle[5] + width - 1;
  1469. circle[7] = circle[6] + width - 1;
  1470. circle[8] = circle[7] - 1;
  1471. circle[9] = circle[8] - 1;
  1472. circle[10] = circle[9] - width - 1;
  1473. circle[11] = circle[10] - width - 1;
  1474. circle[12] = circle[11] - width;
  1475. circle[13] = circle[12] - width;
  1476. circle[14] = circle[13] - width + 1;
  1477. circle[15] = circle[14] - width + 1;
  1478. this.circles_[width] = circle;
  1479. return circle;
  1480. };
  1481. }());
  1482. (function() {
  1483. /**
  1484. * Math utility.
  1485. * @static
  1486. * @constructor
  1487. */
  1488. tracking.Math = {};
  1489. /**
  1490. * Euclidean distance between two points P(x0, y0) and P(x1, y1).
  1491. * @param {number} x0 Horizontal coordinate of P0.
  1492. * @param {number} y0 Vertical coordinate of P0.
  1493. * @param {number} x1 Horizontal coordinate of P1.
  1494. * @param {number} y1 Vertical coordinate of P1.
  1495. * @return {number} The euclidean distance.
  1496. */
  1497. tracking.Math.distance = function(x0, y0, x1, y1) {
  1498. var dx = x1 - x0;
  1499. var dy = y1 - y0;
  1500. return Math.sqrt(dx * dx + dy * dy);
  1501. };
/**
 * Calculates the Hamming weight of a string, which is the number of symbols that are
 * different from the zero-symbol of the alphabet used. It is thus
 * equivalent to the Hamming distance from the all-zero string of the same
 * length. For the most typical case, a string of bits, this is the number
 * of 1's in the string.
 *
 * Example:
 *
 * <pre>
 *  Binary string     Hamming weight
 *   11101                 4
 *   11101010              5
 * </pre>
 *
 * @param {number} i Number that holds the binary string to extract the hamming weight.
 * @return {number} The hamming weight.
 */
tracking.Math.hammingWeight = function(i) {
// Classic SWAR (SIMD-within-a-register) popcount over the 32-bit integer:
// count bits in 2-bit halves, then fold into 4-bit nibbles, then multiply
// to sum all byte counts into the top byte and shift it down.
i = i - ((i >> 1) & 0x55555555);
i = (i & 0x33333333) + ((i >> 2) & 0x33333333);
return ((i + (i >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
};
/**
 * Generates a pseudo-random number in the half-open interval [a, b).
 * Note: `b` itself is never returned because Math.random() is in [0, 1).
 * @param {number} a Lower bound, inclusive.
 * @param {number} b Upper bound, exclusive.
 * @return {number}
 */
tracking.Math.uniformRandom = function(a, b) {
return a + Math.random() * (b - a);
};
  1534. /**
  1535. * Tests if a rectangle intersects with another.
  1536. *
  1537. * <pre>
  1538. * x0y0 -------- x2y2 --------
  1539. * | | | |
  1540. * -------- x1y1 -------- x3y3
  1541. * </pre>
  1542. *
  1543. * @param {number} x0 Horizontal coordinate of P0.
  1544. * @param {number} y0 Vertical coordinate of P0.
  1545. * @param {number} x1 Horizontal coordinate of P1.
  1546. * @param {number} y1 Vertical coordinate of P1.
  1547. * @param {number} x2 Horizontal coordinate of P2.
  1548. * @param {number} y2 Vertical coordinate of P2.
  1549. * @param {number} x3 Horizontal coordinate of P3.
  1550. * @param {number} y3 Vertical coordinate of P3.
  1551. * @return {boolean}
  1552. */
  1553. tracking.Math.intersectRect = function(x0, y0, x1, y1, x2, y2, x3, y3) {
  1554. return !(x2 > x1 || x3 < x0 || y2 > y1 || y3 < y0);
  1555. };
  1556. }());
  1557. (function() {
  1558. /**
  1559. * Matrix utility.
  1560. * @static
  1561. * @constructor
  1562. */
  1563. tracking.Matrix = {};
  1564. /**
  1565. * Loops the array organized as major-row order and executes `fn` callback
  1566. * for each iteration. The `fn` callback receives the following parameters:
  1567. * `(r,g,b,a,index,i,j)`, where `r,g,b,a` represents the pixel color with
  1568. * alpha channel, `index` represents the position in the major-row order
  1569. * array and `i,j` the respective indexes positions in two dimensions.
  1570. * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop
  1571. * through.
  1572. * @param {number} width The image width.
  1573. * @param {number} height The image height.
  1574. * @param {function} fn The callback function for each pixel.
  1575. * @param {number} opt_jump Optional jump for the iteration, by default it
  1576. * is 1, hence loops all the pixels of the array.
  1577. * @static
  1578. */
  1579. tracking.Matrix.forEach = function(pixels, width, height, fn, opt_jump) {
  1580. opt_jump = opt_jump || 1;
  1581. for (var i = 0; i < height; i += opt_jump) {
  1582. for (var j = 0; j < width; j += opt_jump) {
  1583. var w = i * width * 4 + j * 4;
  1584. fn.call(this, pixels[w], pixels[w + 1], pixels[w + 2], pixels[w + 3], w, i, j);
  1585. }
  1586. }
  1587. };
  1588. /**
  1589. * Calculates the per-element subtraction of two NxM matrices and returns a
  1590. * new NxM matrix as the result.
  1591. * @param {matrix} a The first matrix.
  1592. * @param {matrix} a The second matrix.
  1593. * @static
  1594. */
  1595. tracking.Matrix.sub = function(a, b){
  1596. var res = tracking.Matrix.clone(a);
  1597. for(var i=0; i < res.length; i++){
  1598. for(var j=0; j < res[i].length; j++){
  1599. res[i][j] -= b[i][j];
  1600. }
  1601. }
  1602. return res;
  1603. }
  1604. /**
  1605. * Calculates the per-element sum of two NxM matrices and returns a new NxM
  1606. * NxM matrix as the result.
  1607. * @param {matrix} a The first matrix.
  1608. * @param {matrix} a The second matrix.
  1609. * @static
  1610. */
  1611. tracking.Matrix.add = function(a, b){
  1612. var res = tracking.Matrix.clone(a);
  1613. for(var i=0; i < res.length; i++){
  1614. for(var j=0; j < res[i].length; j++){
  1615. res[i][j] += b[i][j];
  1616. }
  1617. }
  1618. return res;
  1619. }
  1620. /**
  1621. * Clones a matrix (or part of it) and returns a new matrix as the result.
  1622. * @param {matrix} src The matrix to be cloned.
  1623. * @param {number} width The second matrix.
  1624. * @static
  1625. */
  1626. tracking.Matrix.clone = function(src, width, height){
  1627. width = width || src[0].length;
  1628. height = height || src.length;
  1629. var temp = new Array(height);
  1630. var i = height;
  1631. while(i--){
  1632. temp[i] = new Array(width);
  1633. var j = width;
  1634. while(j--) temp[i][j] = src[i][j];
  1635. }
  1636. return temp;
  1637. }
  1638. /**
  1639. * Multiply a matrix by a scalar and returns a new matrix as the result.
  1640. * @param {number} scalar The scalar to multiply the matrix by.
  1641. * @param {matrix} src The matrix to be multiplied.
  1642. * @static
  1643. */
  1644. tracking.Matrix.mulScalar = function(scalar, src){
  1645. var res = tracking.Matrix.clone(src);
  1646. for(var i=0; i < src.length; i++){
  1647. for(var j=0; j < src[i].length; j++){
  1648. res[i][j] *= scalar;
  1649. }
  1650. }
  1651. return res;
  1652. }
  1653. /**
  1654. * Transpose a matrix and returns a new matrix as the result.
  1655. * @param {matrix} src The matrix to be transposed.
  1656. * @static
  1657. */
  1658. tracking.Matrix.transpose = function(src){
  1659. var transpose = new Array(src[0].length);
  1660. for(var i=0; i < src[0].length; i++){
  1661. transpose[i] = new Array(src.length);
  1662. for(var j=0; j < src.length; j++){
  1663. transpose[i][j] = src[j][i];
  1664. }
  1665. }
  1666. return transpose;
  1667. }
  1668. /**
  1669. * Multiply an MxN matrix with an NxP matrix and returns a new MxP matrix
  1670. * as the result.
  1671. * @param {matrix} a The first matrix.
  1672. * @param {matrix} b The second matrix.
  1673. * @static
  1674. */
  1675. tracking.Matrix.mul = function(a, b) {
  1676. var res = new Array(a.length);
  1677. for (var i = 0; i < a.length; i++) {
  1678. res[i] = new Array(b[0].length);
  1679. for (var j = 0; j < b[0].length; j++) {
  1680. res[i][j] = 0;
  1681. for (var k = 0; k < a[0].length; k++) {
  1682. res[i][j] += a[i][k] * b[k][j];
  1683. }
  1684. }
  1685. }
  1686. return res;
  1687. }
  1688. /**
  1689. * Calculates the absolute norm of a matrix.
  1690. * @param {matrix} src The matrix which norm will be calculated.
  1691. * @static
  1692. */
  1693. tracking.Matrix.norm = function(src){
  1694. var res = 0;
  1695. for(var i=0; i < src.length; i++){
  1696. for(var j=0; j < src[i].length; j++){
  1697. res += src[i][j]*src[i][j];
  1698. }
  1699. }
  1700. return Math.sqrt(res);
  1701. }
/**
 * Calculates and returns the covariance matrix of a set of vectors as well
 * as the mean of the matrix. Each row of `src` is treated as one variable
 * observed across `src[i].length` samples.
 * NOTE(review): the returned covariance is the unnormalized scatter matrix
 * (src - mean)^T * (src - mean) — it is not divided by the sample count.
 * @param {matrix} src The matrix which covariance matrix will be calculated.
 * @return {Array} A two-element array: [covarianceMatrix, meanColumnVector].
 * @static
 */
tracking.Matrix.calcCovarMatrix = function(src){
// Per-row mean, stored as an Nx1 column vector.
var mean = new Array(src.length);
for(var i=0; i < src.length; i++){
mean[i] = [0.0];
for(var j=0; j < src[i].length; j++){
mean[i][0] += src[i][j]/src[i].length;
}
}
// Broadcast the mean column across all columns of src so it can be
// subtracted element-wise (push one copy per remaining column).
var deltaFull = tracking.Matrix.clone(mean);
for(var i=0; i < deltaFull.length; i++){
for(var j=0; j < src[0].length - 1; j++){
deltaFull[i].push(deltaFull[i][0]);
}
}
// Scatter matrix: a = src - mean (centered data), covar = a^T * a.
var a = tracking.Matrix.sub(src, deltaFull);
var b = tracking.Matrix.transpose(a);
var covar = tracking.Matrix.mul(b,a);
return [covar, mean];
}
  1727. }());
(function() {
/**
 * EPnP utility.
 * @static
 * @constructor
 */
tracking.EPnP = {};
/**
 * Placeholder for an EPnP (Efficient Perspective-n-Point) pose solver.
 * NOTE(review): this is an empty stub — it ignores all of its arguments and
 * returns undefined; callers must not rely on it producing a pose.
 * @param {Array} objectPoints 3D object points (currently unused).
 * @param {Array} imagePoints 2D image projections (currently unused).
 * @param {Array} cameraMatrix Camera intrinsics (currently unused).
 */
tracking.EPnP.solve = function(objectPoints, imagePoints, cameraMatrix) {};
}());
(function() {
/**
 * Tracker utility. Base type for all trackers; extends EventEmitter so
 * concrete trackers can emit `track` events for each processed frame.
 * @constructor
 * @extends {tracking.EventEmitter}
 */
tracking.Tracker = function() {
tracking.Tracker.base(this, 'constructor');
};
tracking.inherits(tracking.Tracker, tracking.EventEmitter);
/**
 * Tracks the pixels on the array. This method is called for each video
 * frame in order to emit `track` event. The base implementation is a
 * no-op; subclasses are expected to override it.
 * @param {Uint8ClampedArray} pixels The pixels data to track.
 * @param {number} width The pixels canvas width.
 * @param {number} height The pixels canvas height.
 */
tracking.Tracker.prototype.track = function() {};
}());
  1756. (function() {
  1757. /**
  1758. * TrackerTask utility.
  1759. * @constructor
  1760. * @extends {tracking.EventEmitter}
  1761. */
  1762. tracking.TrackerTask = function(tracker) {
  1763. tracking.TrackerTask.base(this, 'constructor');
  1764. if (!tracker) {
  1765. throw new Error('Tracker instance not specified.');
  1766. }
  1767. this.setTracker(tracker);
  1768. };
  1769. tracking.inherits(tracking.TrackerTask, tracking.EventEmitter);
  1770. /**
  1771. * Holds the tracker instance managed by this task.
  1772. * @type {tracking.Tracker}
  1773. * @private
  1774. */
  1775. tracking.TrackerTask.prototype.tracker_ = null;
  1776. /**
  1777. * Holds if the tracker task is in running.
  1778. * @type {boolean}
  1779. * @private
  1780. */
  1781. tracking.TrackerTask.prototype.running_ = false;
  1782. /**
  1783. * Gets the tracker instance managed by this task.
  1784. * @return {tracking.Tracker}
  1785. */
  1786. tracking.TrackerTask.prototype.getTracker = function() {
  1787. return this.tracker_;
  1788. };
  1789. /**
  1790. * Returns true if the tracker task is in running, false otherwise.
  1791. * @return {boolean}
  1792. * @private
  1793. */
  1794. tracking.TrackerTask.prototype.inRunning = function() {
  1795. return this.running_;
  1796. };
  1797. /**
  1798. * Sets if the tracker task is in running.
  1799. * @param {boolean} running
  1800. * @private
  1801. */
  1802. tracking.TrackerTask.prototype.setRunning = function(running) {
  1803. this.running_ = running;
  1804. };
  1805. /**
  1806. * Sets the tracker instance managed by this task.
  1807. * @return {tracking.Tracker}
  1808. */
  1809. tracking.TrackerTask.prototype.setTracker = function(tracker) {
  1810. this.tracker_ = tracker;
  1811. };
  1812. /**
  1813. * Emits a `run` event on the tracker task for the implementers to run any
  1814. * child action, e.g. `requestAnimationFrame`.
  1815. * @return {object} Returns itself, so calls can be chained.
  1816. */
  1817. tracking.TrackerTask.prototype.run = function() {
  1818. var self = this;
  1819. if (this.inRunning()) {
  1820. return;
  1821. }
  1822. this.setRunning(true);
  1823. this.reemitTrackEvent_ = function(event) {
  1824. self.emit('track', event);
  1825. };
  1826. this.tracker_.on('track', this.reemitTrackEvent_);
  1827. this.emit('run');
  1828. return this;
  1829. };
  1830. /**
  1831. * Emits a `stop` event on the tracker task for the implementers to stop any
  1832. * child action being done, e.g. `requestAnimationFrame`.
  1833. * @return {object} Returns itself, so calls can be chained.
  1834. */
  1835. tracking.TrackerTask.prototype.stop = function() {
  1836. if (!this.inRunning()) {
  1837. return;
  1838. }
  1839. this.setRunning(false);
  1840. this.emit('stop');
  1841. this.tracker_.removeListener('track', this.reemitTrackEvent_);
  1842. return this;
  1843. };
  1844. }());
  1845. (function() {
  1846. /**
  1847. * ColorTracker utility to track colored blobs in a frame using color
  1848. * difference evaluation.
  1849. * @constructor
  1850. * @param {string|Array.<string>} opt_colors Optional colors to track.
  1851. * @extends {tracking.Tracker}
  1852. */
  1853. tracking.ColorTracker = function(opt_colors) {
  1854. tracking.ColorTracker.base(this, 'constructor');
  1855. if (typeof opt_colors === 'string') {
  1856. opt_colors = [opt_colors];
  1857. }
  1858. if (opt_colors) {
  1859. opt_colors.forEach(function(color) {
  1860. if (!tracking.ColorTracker.getColor(color)) {
  1861. throw new Error('Color not valid, try `new tracking.ColorTracker("magenta")`.');
  1862. }
  1863. });
  1864. this.setColors(opt_colors);
  1865. }
  1866. };
  1867. tracking.inherits(tracking.ColorTracker, tracking.Tracker);
/**
 * Holds the known colors: maps a color name to its predicate function.
 * @type {Object.<string, function>}
 * @private
 * @static
 */
tracking.ColorTracker.knownColors_ = {};
/**
 * Caches offset values of the 8-neighbours surrounding a pixel, keyed by
 * image width (offsets depend on the row stride).
 * @type {Object.<number, Int32Array>}
 * @private
 * @static
 */
tracking.ColorTracker.neighbours_ = {};
/**
 * Registers a color as known color.
 * @param {string} name The color name.
 * @param {function} fn The color function to test if the passed (r,g,b) is
 *     the desired color.
 * @static
 */
tracking.ColorTracker.registerColor = function(name, fn) {
tracking.ColorTracker.knownColors_[name] = fn;
};
/**
 * Gets the known color function that is able to test whether an (r,g,b) is
 * the desired color. Returns undefined for unregistered names.
 * @param {string} name The color name.
 * @return {function} The known color test function.
 * @static
 */
tracking.ColorTracker.getColor = function(name) {
return tracking.ColorTracker.knownColors_[name];
};
/**
 * Holds the colors to be tracked by the `ColorTracker` instance.
 * @default ['magenta']
 * @type {Array.<string>}
 */
tracking.ColorTracker.prototype.colors = ['magenta'];
/**
 * Holds the minimum dimension (in pixels) to classify a rectangle.
 * @default 20
 * @type {number}
 */
tracking.ColorTracker.prototype.minDimension = 20;
/**
 * Holds the maximum dimension (in pixels) to classify a rectangle.
 * @default Infinity
 * @type {number}
 */
tracking.ColorTracker.prototype.maxDimension = Infinity;
/**
 * Holds the minimum group size (in cloud points) to be classified as a
 * rectangle.
 * @default 30
 * @type {number}
 */
tracking.ColorTracker.prototype.minGroupSize = 30;
/**
 * Calculates the bounding box of the cloud points. The cloud points are
 * all points that match the desired color.
 * @param {Array.<number>} cloud Major row order array containing all the
 *     points from the desired color, e.g. [x1, y1, x2, y2, ...].
 * @param {number} total Total number of valid entries in `cloud` (pairs of
 *     coordinates, so `total` is twice the point count).
 * @return {object} Object containing the x, y, width and height of the
 *     bounding box of the blob extracted from the cloud points.
 * @private
 */
tracking.ColorTracker.prototype.calculateDimensions_ = function(cloud, total) {
var maxx = -1;
var maxy = -1;
var minx = Infinity;
var miny = Infinity;
// Single pass over the (x, y) pairs to find the bounding extremes.
for (var c = 0; c < total; c += 2) {
var x = cloud[c];
var y = cloud[c + 1];
if (x < minx) {
minx = x;
}
if (x > maxx) {
maxx = x;
}
if (y < miny) {
miny = y;
}
if (y > maxy) {
maxy = y;
}
}
return {
width: maxx - minx,
height: maxy - miny,
x: minx,
y: miny
};
};
/**
 * Gets the colors being tracked by the `ColorTracker` instance.
 * @return {Array.<string>}
 */
tracking.ColorTracker.prototype.getColors = function() {
return this.colors;
};
/**
 * Gets the minimum dimension (in pixels) to classify a rectangle.
 * @return {number}
 */
tracking.ColorTracker.prototype.getMinDimension = function() {
return this.minDimension;
};
/**
 * Gets the maximum dimension (in pixels) to classify a rectangle.
 * @return {number}
 */
tracking.ColorTracker.prototype.getMaxDimension = function() {
return this.maxDimension;
};
/**
 * Gets the minimum group size to be classified as a rectangle.
 * @return {number}
 */
tracking.ColorTracker.prototype.getMinGroupSize = function() {
return this.minGroupSize;
};
  1992. /**
  1993. * Gets the eight offset values of the neighbours surrounding a pixel.
  1994. * @param {number} width The image width.
  1995. * @return {array} Array with the eight offset values of the neighbours
  1996. * surrounding a pixel.
  1997. * @private
  1998. */
  1999. tracking.ColorTracker.prototype.getNeighboursForWidth_ = function(width) {
  2000. if (tracking.ColorTracker.neighbours_[width]) {
  2001. return tracking.ColorTracker.neighbours_[width];
  2002. }
  2003. var neighbours = new Int32Array(8);
  2004. neighbours[0] = -width * 4;
  2005. neighbours[1] = -width * 4 + 4;
  2006. neighbours[2] = 4;
  2007. neighbours[3] = width * 4 + 4;
  2008. neighbours[4] = width * 4;
  2009. neighbours[5] = width * 4 - 4;
  2010. neighbours[6] = -4;
  2011. neighbours[7] = -width * 4 - 4;
  2012. tracking.ColorTracker.neighbours_[width] = neighbours;
  2013. return neighbours;
  2014. };
/**
 * Unites groups whose bounding box intersect with each other, then filters
 * the survivors by the configured min/max dimensions.
 * @param {Array.<Object>} rects Rectangles ({x, y, width, height}); entries
 *     may be mutated in place while merging.
 * @return {Array.<Object>} The merged, dimension-filtered rectangles.
 * @private
 */
tracking.ColorTracker.prototype.mergeRectangles_ = function(rects) {
var intersects;
var results = [];
var minDimension = this.getMinDimension();
var maxDimension = this.getMaxDimension();
for (var r = 0; r < rects.length; r++) {
var r1 = rects[r];
// NOTE: counter-intuitively, `intersects` stays true when r1 did NOT
// intersect any later rectangle (i.e. r1 survives on its own).
intersects = true;
for (var s = r + 1; s < rects.length; s++) {
var r2 = rects[s];
if (tracking.Math.intersectRect(r1.x, r1.y, r1.x + r1.width, r1.y + r1.height, r2.x, r2.y, r2.x + r2.width, r2.y + r2.height)) {
intersects = false;
// Grow r2 (a later, still-unprocessed entry) to the union of r1 and
// r2; r1 is dropped and the union is revisited when the outer loop
// reaches r2.
var x1 = Math.min(r1.x, r2.x);
var y1 = Math.min(r1.y, r2.y);
var x2 = Math.max(r1.x + r1.width, r2.x + r2.width);
var y2 = Math.max(r1.y + r1.height, r2.y + r2.height);
r2.height = y2 - y1;
r2.width = x2 - x1;
r2.x = x1;
r2.y = y1;
break;
}
}
if (intersects) {
// r1 merged with nothing: keep it only if it fits the size bounds.
if (r1.width >= minDimension && r1.height >= minDimension) {
if (r1.width <= maxDimension && r1.height <= maxDimension) {
results.push(r1);
}
}
}
}
return results;
};
/**
 * Sets the colors to be tracked by the `ColorTracker` instance.
 * @param {Array.<string>} colors Names previously registered via
 *     `tracking.ColorTracker.registerColor`.
 */
tracking.ColorTracker.prototype.setColors = function(colors) {
this.colors = colors;
};
/**
 * Sets the minimum dimension (in pixels) to classify a rectangle.
 * @param {number} minDimension
 */
tracking.ColorTracker.prototype.setMinDimension = function(minDimension) {
this.minDimension = minDimension;
};
/**
 * Sets the maximum dimension (in pixels) to classify a rectangle.
 * @param {number} maxDimension
 */
tracking.ColorTracker.prototype.setMaxDimension = function(maxDimension) {
this.maxDimension = maxDimension;
};
/**
 * Sets the minimum group size to be classified as a rectangle.
 * @param {number} minGroupSize
 */
tracking.ColorTracker.prototype.setMinGroupSize = function(minGroupSize) {
this.minGroupSize = minGroupSize;
};
  2081. /**
  2082. * Tracks the `Video` frames. This method is called for each video frame in
  2083. * order to emit `track` event.
  2084. * @param {Uint8ClampedArray} pixels The pixels data to track.
  2085. * @param {number} width The pixels canvas width.
  2086. * @param {number} height The pixels canvas height.
  2087. */
  2088. tracking.ColorTracker.prototype.track = function(pixels, width, height) {
  2089. var self = this;
  2090. var colors = this.getColors();
  2091. if (!colors) {
  2092. throw new Error('Colors not specified, try `new tracking.ColorTracker("magenta")`.');
  2093. }
  2094. var results = [];
  2095. colors.forEach(function(color) {
  2096. results = results.concat(self.trackColor_(pixels, width, height, color));
  2097. });
  2098. this.emit('track', {
  2099. data: results
  2100. });
  2101. };
/**
 * Find the given color in the given matrix of pixels using Flood fill
 * algorithm to determines the area connected to a given node in a
 * multi-dimensional array.
 * @param {Uint8ClampedArray} pixels The pixels data to track.
 * @param {number} width The pixels canvas width.
 * @param {number} height The pixels canvas height.
 * @param {string} color The color to be found.
 * @return {Array.<Object>} The merged bounding boxes of the matching blobs.
 * @private
 */
tracking.ColorTracker.prototype.trackColor_ = function(pixels, width, height, color) {
var colorFn = tracking.ColorTracker.knownColors_[color];
// Scratch buffer for the (j, i) coordinate pairs of the current blob.
var currGroup = new Int32Array(pixels.length >> 2);
var currGroupSize;
var currI;
var currJ;
var currW;
// Visited flags, indexed by the RGBA array offset `w` (one flag per pixel,
// at stride 4; intermediate entries stay unused).
var marked = new Int8Array(pixels.length);
var minGroupSize = this.getMinGroupSize();
var neighboursW = this.getNeighboursForWidth_(width);
// Explicit stack for the flood fill; entries are pushed as (w, i, j)
// triplets and popped in reverse order below.
var queue = new Int32Array(pixels.length);
var queuePosition;
var results = [];
var w = -4;
if (!colorFn) {
return results;
}
for (var i = 0; i < height; i++) {
for (var j = 0; j < width; j++) {
w += 4;
if (marked[w]) {
continue;
}
currGroupSize = 0;
queuePosition = -1;
queue[++queuePosition] = w;
queue[++queuePosition] = i;
queue[++queuePosition] = j;
marked[w] = 1;
while (queuePosition >= 0) {
// Pop the (w, i, j) triplet (reverse of the push order above).
currJ = queue[queuePosition--];
currI = queue[queuePosition--];
currW = queue[queuePosition--];
if (colorFn(pixels[currW], pixels[currW + 1], pixels[currW + 2], pixels[currW + 3], currW, currI, currJ)) {
currGroup[currGroupSize++] = currJ;
currGroup[currGroupSize++] = currI;
// Expand into the 8-neighbourhood; neighboursI/neighboursJ are the
// module-level row/column deltas matching neighboursW's order.
for (var k = 0; k < neighboursW.length; k++) {
var otherW = currW + neighboursW[k];
var otherI = currI + neighboursI[k];
var otherJ = currJ + neighboursJ[k];
if (!marked[otherW] && otherI >= 0 && otherI < height && otherJ >= 0 && otherJ < width) {
queue[++queuePosition] = otherW;
queue[++queuePosition] = otherI;
queue[++queuePosition] = otherJ;
marked[otherW] = 1;
}
}
}
}
// Keep only blobs with enough matching points.
if (currGroupSize >= minGroupSize) {
var data = this.calculateDimensions_(currGroup, currGroupSize);
if (data) {
data.color = color;
results.push(data);
}
}
}
}
return this.mergeRectangles_(results);
};
  2172. // Default colors
  2173. //===================
  2174. tracking.ColorTracker.registerColor('cyan', function(r, g, b) {
  2175. var thresholdGreen = 50,
  2176. thresholdBlue = 70,
  2177. dx = r - 0,
  2178. dy = g - 255,
  2179. dz = b - 255;
  2180. if ((g - r) >= thresholdGreen && (b - r) >= thresholdBlue) {
  2181. return true;
  2182. }
  2183. return dx * dx + dy * dy + dz * dz < 6400;
  2184. });
  2185. tracking.ColorTracker.registerColor('magenta', function(r, g, b) {
  2186. var threshold = 50,
  2187. dx = r - 255,
  2188. dy = g - 0,
  2189. dz = b - 255;
  2190. if ((r - g) >= threshold && (b - g) >= threshold) {
  2191. return true;
  2192. }
  2193. return dx * dx + dy * dy + dz * dz < 19600;
  2194. });
  2195. tracking.ColorTracker.registerColor('yellow', function(r, g, b) {
  2196. var threshold = 50,
  2197. dx = r - 255,
  2198. dy = g - 255,
  2199. dz = b - 0;
  2200. if ((r - b) >= threshold && (g - b) >= threshold) {
  2201. return true;
  2202. }
  2203. return dx * dx + dy * dy + dz * dz < 10000;
  2204. });
// Caching neighbour i/j offset values.
// Row (i) and column (j) deltas for the 8-connected neighbourhood, listed
// clockwise starting from the pixel directly above. These are indexed in
// lockstep with the per-width byte offsets from getNeighboursForWidth_.
//=====================================
var neighboursI = new Int32Array([-1, -1, 0, 1, 1, 1, 0, -1]);
var neighboursJ = new Int32Array([0, 1, 1, 1, 0, -1, -1, -1]);
  2209. }());
  2210. (function() {
  2211. /**
  2212. * ObjectTracker utility.
  2213. * @constructor
  2214. * @param {string|Array.<string|Array.<number>>} opt_classifiers Optional
  2215. * object classifiers to track.
  2216. * @extends {tracking.Tracker}
  2217. */
  2218. tracking.ObjectTracker = function(opt_classifiers) {
  2219. tracking.ObjectTracker.base(this, 'constructor');
  2220. if (opt_classifiers) {
  2221. if (!Array.isArray(opt_classifiers)) {
  2222. opt_classifiers = [opt_classifiers];
  2223. }
  2224. if (Array.isArray(opt_classifiers)) {
  2225. opt_classifiers.forEach(function(classifier, i) {
  2226. if (typeof classifier === 'string') {
  2227. opt_classifiers[i] = tracking.ViolaJones.classifiers[classifier];
  2228. }
  2229. if (!opt_classifiers[i]) {
  2230. throw new Error('Object classifier not valid, try `new tracking.ObjectTracker("face")`.');
  2231. }
  2232. });
  2233. }
  2234. }
  2235. this.setClassifiers(opt_classifiers);
  2236. };
  2237. tracking.inherits(tracking.ObjectTracker, tracking.Tracker);
  2238. /**
  2239. * Specifies the edges density of a block in order to decide whether to skip
  2240. * it or not.
  2241. * @default 0.2
  2242. * @type {number}
  2243. */
  2244. tracking.ObjectTracker.prototype.edgesDensity = 0.2;
  2245. /**
  2246. * Specifies the initial scale to start the feature block scaling.
  2247. * @default 1.0
  2248. * @type {number}
  2249. */
  2250. tracking.ObjectTracker.prototype.initialScale = 1.0;
  2251. /**
  2252. * Specifies the scale factor to scale the feature block.
  2253. * @default 1.25
  2254. * @type {number}
  2255. */
  2256. tracking.ObjectTracker.prototype.scaleFactor = 1.25;
  2257. /**
  2258. * Specifies the block step size.
  2259. * @default 1.5
  2260. * @type {number}
  2261. */
  2262. tracking.ObjectTracker.prototype.stepSize = 1.5;
  2263. /**
  2264. * Gets the tracker HAAR classifiers.
  2265. * @return {TypedArray.<number>}
  2266. */
  2267. tracking.ObjectTracker.prototype.getClassifiers = function() {
  2268. return this.classifiers;
  2269. };
  2270. /**
  2271. * Gets the edges density value.
  2272. * @return {number}
  2273. */
  2274. tracking.ObjectTracker.prototype.getEdgesDensity = function() {
  2275. return this.edgesDensity;
  2276. };
  2277. /**
  2278. * Gets the initial scale to start the feature block scaling.
  2279. * @return {number}
  2280. */
  2281. tracking.ObjectTracker.prototype.getInitialScale = function() {
  2282. return this.initialScale;
  2283. };
  2284. /**
  2285. * Gets the scale factor to scale the feature block.
  2286. * @return {number}
  2287. */
  2288. tracking.ObjectTracker.prototype.getScaleFactor = function() {
  2289. return this.scaleFactor;
  2290. };
  2291. /**
  2292. * Gets the block step size.
  2293. * @return {number}
  2294. */
  2295. tracking.ObjectTracker.prototype.getStepSize = function() {
  2296. return this.stepSize;
  2297. };
  2298. /**
  2299. * Tracks the `Video` frames. This method is called for each video frame in
  2300. * order to emit `track` event.
  2301. * @param {Uint8ClampedArray} pixels The pixels data to track.
  2302. * @param {number} width The pixels canvas width.
  2303. * @param {number} height The pixels canvas height.
  2304. */
  2305. tracking.ObjectTracker.prototype.track = function(pixels, width, height) {
  2306. var self = this;
  2307. var classifiers = this.getClassifiers();
  2308. if (!classifiers) {
  2309. throw new Error('Object classifier not specified, try `new tracking.ObjectTracker("face")`.');
  2310. }
  2311. var results = [];
  2312. classifiers.forEach(function(classifier) {
  2313. results = results.concat(tracking.ViolaJones.detect(pixels, width, height, self.getInitialScale(), self.getScaleFactor(), self.getStepSize(), self.getEdgesDensity(), classifier));
  2314. });
  2315. this.emit('track', {
  2316. data: results
  2317. });
  2318. };
  2319. /**
  2320. * Sets the tracker HAAR classifiers.
  2321. * @param {TypedArray.<number>} classifiers
  2322. */
  2323. tracking.ObjectTracker.prototype.setClassifiers = function(classifiers) {
  2324. this.classifiers = classifiers;
  2325. };
  2326. /**
  2327. * Sets the edges density.
  2328. * @param {number} edgesDensity
  2329. */
  2330. tracking.ObjectTracker.prototype.setEdgesDensity = function(edgesDensity) {
  2331. this.edgesDensity = edgesDensity;
  2332. };
  2333. /**
  2334. * Sets the initial scale to start the block scaling.
  2335. * @param {number} initialScale
  2336. */
  2337. tracking.ObjectTracker.prototype.setInitialScale = function(initialScale) {
  2338. this.initialScale = initialScale;
  2339. };
  2340. /**
  2341. * Sets the scale factor to scale the feature block.
  2342. * @param {number} scaleFactor
  2343. */
  2344. tracking.ObjectTracker.prototype.setScaleFactor = function(scaleFactor) {
  2345. this.scaleFactor = scaleFactor;
  2346. };
  2347. /**
  2348. * Sets the block step size.
  2349. * @param {number} stepSize
  2350. */
  2351. tracking.ObjectTracker.prototype.setStepSize = function(stepSize) {
  2352. this.stepSize = stepSize;
  2353. };
  2354. }());
  2355. (function() {
  2356. tracking.LandmarksTracker = function() {
  2357. tracking.LandmarksTracker.base(this, 'constructor');
  2358. }
  2359. tracking.inherits(tracking.LandmarksTracker, tracking.ObjectTracker);
  2360. tracking.LandmarksTracker.prototype.track = function(pixels, width, height) {
  2361. var image = {
  2362. 'data': pixels,
  2363. 'width': width,
  2364. 'height': height
  2365. };
  2366. var classifier = tracking.ViolaJones.classifiers['face'];
  2367. var faces = tracking.ViolaJones.detect(pixels, width, height,
  2368. this.getInitialScale(), this.getScaleFactor(), this.getStepSize(),
  2369. this.getEdgesDensity(), classifier);
  2370. var landmarks = tracking.LBF.align(pixels, width, height, faces);
  2371. this.emit('track', {
  2372. 'data': {
  2373. 'faces' : faces,
  2374. 'landmarks' : landmarks
  2375. }
  2376. });
  2377. }
  2378. }());
  2379. (function() {
  2380. tracking.LBF = {};
  2381. /**
  2382. * LBF Regressor utility.
  2383. * @constructor
  2384. */
  2385. tracking.LBF.Regressor = function(maxNumStages){
  2386. this.maxNumStages = maxNumStages;
  2387. this.rfs = new Array(maxNumStages);
  2388. this.models = new Array(maxNumStages);
  2389. for(var i=0; i < maxNumStages; i++){
  2390. this.rfs[i] = new tracking.LBF.RandomForest(i);
  2391. this.models[i] = tracking.LBF.RegressorData[i].models;
  2392. }
  2393. this.meanShape = tracking.LBF.LandmarksData;
  2394. }
  2395. /**
  2396. * Predicts the position of the landmarks based on the bounding box of the face.
  2397. * @param {pixels} pixels The grayscale pixels in a linear array.
  2398. * @param {number} width Width of the image.
  2399. * @param {number} height Height of the image.
  2400. * @param {object} boudingBox Bounding box of the face to be aligned.
  2401. * @return {matrix} A matrix with each landmark position in a row [x,y].
  2402. */
  2403. tracking.LBF.Regressor.prototype.predict = function(pixels, width, height, boundingBox) {
  2404. var images = [];
  2405. var currentShapes = [];
  2406. var boundingBoxes = [];
  2407. var meanShapeClone = tracking.Matrix.clone(this.meanShape);
  2408. images.push({
  2409. 'data': pixels,
  2410. 'width': width,
  2411. 'height': height
  2412. });
  2413. boundingBoxes.push(boundingBox);
  2414. currentShapes.push(tracking.LBF.projectShapeToBoundingBox_(meanShapeClone, boundingBox));
  2415. for(var stage = 0; stage < this.maxNumStages; stage++){
  2416. var binaryFeatures = tracking.LBF.Regressor.deriveBinaryFeat(this.rfs[stage], images, currentShapes, boundingBoxes, meanShapeClone);
  2417. this.applyGlobalPrediction(binaryFeatures, this.models[stage], currentShapes, boundingBoxes);
  2418. }
  2419. return currentShapes[0];
  2420. };
  2421. /**
  2422. * Multiplies the binary features of the landmarks with the regression matrix
  2423. * to obtain the displacement for each landmark. Then applies this displacement
  2424. * into the landmarks shape.
  2425. * @param {object} binaryFeatures The binary features for the landmarks.
  2426. * @param {object} models The regressor models.
  2427. * @param {matrix} currentShapes The landmarks shapes.
  2428. * @param {array} boudingBoxes The bounding boxes of the faces.
  2429. */
  2430. tracking.LBF.Regressor.prototype.applyGlobalPrediction = function(binaryFeatures, models, currentShapes,
  2431. boundingBoxes){
  2432. var residual = currentShapes[0].length * 2;
  2433. var rotation = [];
  2434. var deltashape = new Array(residual/2);
  2435. for(var i=0; i < residual/2; i++){
  2436. deltashape[i] = [0.0, 0.0];
  2437. }
  2438. for(var i=0; i < currentShapes.length; i++){
  2439. for(var j=0; j < residual; j++){
  2440. var tmp = 0;
  2441. for(var lx=0, idx=0; (idx = binaryFeatures[i][lx].index) != -1; lx++){
  2442. if(idx <= models[j].nr_feature){
  2443. tmp += models[j].data[(idx - 1)] * binaryFeatures[i][lx].value;
  2444. }
  2445. }
  2446. if(j < residual/2){
  2447. deltashape[j][0] = tmp;
  2448. }else{
  2449. deltashape[j - residual/2][1] = tmp;
  2450. }
  2451. }
  2452. var res = tracking.LBF.similarityTransform_(tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]), this.meanShape);
  2453. var rotation = tracking.Matrix.transpose(res[0]);
  2454. var s = tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]);
  2455. s = tracking.Matrix.add(s, deltashape);
  2456. currentShapes[i] = tracking.LBF.projectShapeToBoundingBox_(s, boundingBoxes[i]);
  2457. }
  2458. };
  2459. /**
  2460. * Derives the binary features from the image for each landmark.
  2461. * @param {object} forest The random forest to search for the best binary feature match.
  2462. * @param {array} images The images with pixels in a grayscale linear array.
  2463. * @param {array} currentShapes The current landmarks shape.
  2464. * @param {array} boudingBoxes The bounding boxes of the faces.
  2465. * @param {matrix} meanShape The mean shape of the current landmarks set.
  2466. * @return {array} The binary features extracted from the image and matched with the
  2467. * training data.
  2468. * @static
  2469. */
  2470. tracking.LBF.Regressor.deriveBinaryFeat = function(forest, images, currentShapes, boundingBoxes, meanShape){
  2471. var binaryFeatures = new Array(images.length);
  2472. for(var i=0; i < images.length; i++){
  2473. var t = forest.maxNumTrees * forest.landmarkNum + 1;
  2474. binaryFeatures[i] = new Array(t);
  2475. for(var j=0; j < t; j++){
  2476. binaryFeatures[i][j] = {};
  2477. }
  2478. }
  2479. var leafnodesPerTree = 1 << (forest.maxDepth - 1);
  2480. for(var i=0; i < images.length; i++){
  2481. var projectedShape = tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]);
  2482. var transform = tracking.LBF.similarityTransform_(projectedShape, meanShape);
  2483. for(var j=0; j < forest.landmarkNum; j++){
  2484. for(var k=0; k < forest.maxNumTrees; k++){
  2485. var binaryCode = tracking.LBF.Regressor.getCodeFromTree(forest.rfs[j][k], images[i],
  2486. currentShapes[i], boundingBoxes[i], transform[0], transform[1]);
  2487. var index = j*forest.maxNumTrees + k;
  2488. binaryFeatures[i][index].index = leafnodesPerTree * index + binaryCode;
  2489. binaryFeatures[i][index].value = 1;
  2490. }
  2491. }
  2492. binaryFeatures[i][forest.landmarkNum * forest.maxNumTrees].index = -1;
  2493. binaryFeatures[i][forest.landmarkNum * forest.maxNumTrees].value = -1;
  2494. }
  2495. return binaryFeatures;
  2496. }
  2497. /**
  2498. * Gets the binary code for a specific tree in a random forest. For each landmark,
  2499. * the position from two pre-defined points are recovered from the training data
  2500. * and then the intensity of the pixels corresponding to these points is extracted
  2501. * from the image and used to traverse the trees in the random forest. At the end,
  2502. * the ending nodes will be represented by 1, and the remaining nodes by 0.
  2503. *
  2504. * +--------------------------- Random Forest -----------------------------+
  2505. * | Ø = Ending leaf |
  2506. * | |
  2507. * | O O O O O |
  2508. * | / \ / \ / \ / \ / \ |
  2509. * | O O O O O O O O O O |
  2510. * | / \ / \ / \ / \ / \ / \ / \ / \ / \ / \ |
  2511. * | Ø O O O O O Ø O O Ø O O O O Ø O O O O Ø |
  2512. * | 1 0 0 0 0 0 1 0 0 1 0 0 0 0 1 0 0 0 0 1 |
  2513. * +-----------------------------------------------------------------------+
  2514. * Final binary code for this landmark: 10000010010000100001
  2515. *
  2516. * @param {object} forest The tree to be analyzed.
  2517. * @param {array} image The image with pixels in a grayscale linear array.
  2518. * @param {matrix} shape The current landmarks shape.
 * @param {object} boundingBox The bounding box of the face.
  2520. * @param {matrix} rotation The rotation matrix used to transform the projected landmarks
  2521. * into the mean shape.
  2522. * @param {number} scale The scale factor used to transform the projected landmarks
  2523. * into the mean shape.
  2524. * @return {number} The binary code extracted from the tree.
  2525. * @static
  2526. */
  2527. tracking.LBF.Regressor.getCodeFromTree = function(tree, image, shape, boundingBox, rotation, scale){
  2528. var current = 0;
  2529. var bincode = 0;
  2530. while(true){
  2531. var x1 = Math.cos(tree.nodes[current].feats[0]) * tree.nodes[current].feats[2] * tree.maxRadioRadius * boundingBox.width;
  2532. var y1 = Math.sin(tree.nodes[current].feats[0]) * tree.nodes[current].feats[2] * tree.maxRadioRadius * boundingBox.height;
  2533. var x2 = Math.cos(tree.nodes[current].feats[1]) * tree.nodes[current].feats[3] * tree.maxRadioRadius * boundingBox.width;
  2534. var y2 = Math.sin(tree.nodes[current].feats[1]) * tree.nodes[current].feats[3] * tree.maxRadioRadius * boundingBox.height;
  2535. var project_x1 = rotation[0][0] * x1 + rotation[0][1] * y1;
  2536. var project_y1 = rotation[1][0] * x1 + rotation[1][1] * y1;
  2537. var real_x1 = Math.floor(project_x1 + shape[tree.landmarkID][0]);
  2538. var real_y1 = Math.floor(project_y1 + shape[tree.landmarkID][1]);
  2539. real_x1 = Math.max(0.0, Math.min(real_x1, image.height - 1.0));
  2540. real_y1 = Math.max(0.0, Math.min(real_y1, image.width - 1.0));
  2541. var project_x2 = rotation[0][0] * x2 + rotation[0][1] * y2;
  2542. var project_y2 = rotation[1][0] * x2 + rotation[1][1] * y2;
  2543. var real_x2 = Math.floor(project_x2 + shape[tree.landmarkID][0]);
  2544. var real_y2 = Math.floor(project_y2 + shape[tree.landmarkID][1]);
  2545. real_x2 = Math.max(0.0, Math.min(real_x2, image.height - 1.0));
  2546. real_y2 = Math.max(0.0, Math.min(real_y2, image.width - 1.0));
  2547. var pdf = Math.floor(image.data[real_y1*image.width + real_x1]) -
  2548. Math.floor(image.data[real_y2 * image.width +real_x2]);
  2549. if(pdf < tree.nodes[current].thresh){
  2550. current = tree.nodes[current].cnodes[0];
  2551. }else{
  2552. current = tree.nodes[current].cnodes[1];
  2553. }
  2554. if (tree.nodes[current].is_leafnode == 1) {
  2555. bincode = 1;
  2556. for (var i=0; i < tree.leafnodes.length; i++) {
  2557. if (tree.leafnodes[i] == current) {
  2558. return bincode;
  2559. }
  2560. bincode++;
  2561. }
  2562. return bincode;
  2563. }
  2564. }
  2565. return bincode;
  2566. }
  2567. }());
  2568. (function() {
  2569. /**
  2570. * Face Alignment via Regressing Local Binary Features (LBF)
  2571. * This approach has two components: a set of local binary features and
  2572. * a locality principle for learning those features.
  2573. * The locality principle is used to guide the learning of a set of highly
  2574. * discriminative local binary features for each landmark independently.
  2575. * The obtained local binary features are used to learn a linear regression
  2576. * that later will be used to guide the landmarks in the alignment phase.
  2577. *
  2578. * @authors: VoxarLabs Team (http://cin.ufpe.br/~voxarlabs)
  2579. * Lucas Figueiredo <lsf@cin.ufpe.br>, Thiago Menezes <tmc2@cin.ufpe.br>,
  2580. * Thiago Domingues <tald@cin.ufpe.br>, Rafael Roberto <rar3@cin.ufpe.br>,
  2581. * Thulio Araujo <tlsa@cin.ufpe.br>, Joao Victor <jvfl@cin.ufpe.br>,
  2582. * Tomer Simis <tls@cin.ufpe.br>)
  2583. */
  2584. /**
  2585. * Holds the maximum number of stages that will be used in the alignment algorithm.
  2586. * Each stage contains a different set of random forests and retrieves the binary
  2587. * code from a more "specialized" (i.e. smaller) region around the landmarks.
  2588. * @type {number}
  2589. * @static
  2590. */
  2591. tracking.LBF.maxNumStages = 4;
  2592. /**
  2593. * Holds the regressor that will be responsible for extracting the local features from
  2594. * the image and guide the landmarks using the training data.
  2595. * @type {object}
  2596. * @protected
  2597. * @static
  2598. */
  2599. tracking.LBF.regressor_ = null;
  2600. /**
  2601. * Generates a set of landmarks for a set of faces
  2602. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
  2603. * @param {number} width The image width.
  2604. * @param {number} height The image height.
  2605. * @param {array} faces The list of faces detected in the image
  2606. * @return {array} The aligned landmarks, each set of landmarks corresponding
  2607. * to a specific face.
  2608. * @static
  2609. */
  2610. tracking.LBF.align = function(pixels, width, height, faces){
  2611. if(tracking.LBF.regressor_ == null){
  2612. tracking.LBF.regressor_ = new tracking.LBF.Regressor(
  2613. tracking.LBF.maxNumStages
  2614. );
  2615. }
  2616. pixels = tracking.Image.grayscale(pixels, width, height, false);
  2617. pixels = tracking.Image.equalizeHist(pixels, width, height);
  2618. var shapes = new Array(faces.length);
  2619. for(var i in faces){
  2620. faces[i].height = faces[i].width;
  2621. var boundingBox = {};
  2622. boundingBox.startX = faces[i].x;
  2623. boundingBox.startY = faces[i].y;
  2624. boundingBox.width = faces[i].width;
  2625. boundingBox.height = faces[i].height;
  2626. shapes[i] = tracking.LBF.regressor_.predict(pixels, width, height, boundingBox);
  2627. }
  2628. return shapes;
  2629. }
  2630. /**
  2631. * Unprojects the landmarks shape from the bounding box.
  2632. * @param {matrix} shape The landmarks shape.
  2633. * @param {matrix} boudingBox The bounding box.
  2634. * @return {matrix} The landmarks shape projected into the bounding box.
  2635. * @static
  2636. * @protected
  2637. */
  2638. tracking.LBF.unprojectShapeToBoundingBox_ = function(shape, boundingBox){
  2639. var temp = new Array(shape.length);
  2640. for(var i=0; i < shape.length; i++){
  2641. temp[i] = [
  2642. (shape[i][0] - boundingBox.startX) / boundingBox.width,
  2643. (shape[i][1] - boundingBox.startY) / boundingBox.height
  2644. ];
  2645. }
  2646. return temp;
  2647. }
  2648. /**
  2649. * Projects the landmarks shape into the bounding box. The landmarks shape has
  2650. * normalized coordinates, so it is necessary to map these coordinates into
  2651. * the bounding box coordinates.
  2652. * @param {matrix} shape The landmarks shape.
  2653. * @param {matrix} boudingBox The bounding box.
  2654. * @return {matrix} The landmarks shape.
  2655. * @static
  2656. * @protected
  2657. */
  2658. tracking.LBF.projectShapeToBoundingBox_ = function(shape, boundingBox){
  2659. var temp = new Array(shape.length);
  2660. for(var i=0; i < shape.length; i++){
  2661. temp[i] = [
  2662. shape[i][0] * boundingBox.width + boundingBox.startX,
  2663. shape[i][1] * boundingBox.height + boundingBox.startY
  2664. ];
  2665. }
  2666. return temp;
  2667. }
  2668. /**
  2669. * Calculates the rotation and scale necessary to transform shape1 into shape2.
  2670. * @param {matrix} shape1 The shape to be transformed.
  2671. * @param {matrix} shape2 The shape to be transformed in.
  2672. * @return {[matrix, scalar]} The rotation matrix and scale that applied to shape1
  2673. * results in shape2.
  2674. * @static
  2675. * @protected
  2676. */
  2677. tracking.LBF.similarityTransform_ = function(shape1, shape2){
  2678. var center1 = [0,0];
  2679. var center2 = [0,0];
  2680. for (var i = 0; i < shape1.length; i++) {
  2681. center1[0] += shape1[i][0];
  2682. center1[1] += shape1[i][1];
  2683. center2[0] += shape2[i][0];
  2684. center2[1] += shape2[i][1];
  2685. }
  2686. center1[0] /= shape1.length;
  2687. center1[1] /= shape1.length;
  2688. center2[0] /= shape2.length;
  2689. center2[1] /= shape2.length;
  2690. var temp1 = tracking.Matrix.clone(shape1);
  2691. var temp2 = tracking.Matrix.clone(shape2);
  2692. for(var i=0; i < shape1.length; i++){
  2693. temp1[i][0] -= center1[0];
  2694. temp1[i][1] -= center1[1];
  2695. temp2[i][0] -= center2[0];
  2696. temp2[i][1] -= center2[1];
  2697. }
  2698. var covariance1, covariance2;
  2699. var mean1, mean2;
  2700. var t = tracking.Matrix.calcCovarMatrix(temp1);
  2701. covariance1 = t[0];
  2702. mean1 = t[1];
  2703. t = tracking.Matrix.calcCovarMatrix(temp2);
  2704. covariance2 = t[0];
  2705. mean2 = t[1];
  2706. var s1 = Math.sqrt(tracking.Matrix.norm(covariance1));
  2707. var s2 = Math.sqrt(tracking.Matrix.norm(covariance2));
  2708. var scale = s1/s2;
  2709. temp1 = tracking.Matrix.mulScalar(1.0/s1, temp1);
  2710. temp2 = tracking.Matrix.mulScalar(1.0/s2, temp2);
  2711. var num = 0, den = 0;
  2712. for (var i = 0; i < shape1.length; i++) {
  2713. num = num + temp1[i][1] * temp2[i][0] - temp1[i][0] * temp2[i][1];
  2714. den = den + temp1[i][0] * temp2[i][0] + temp1[i][1] * temp2[i][1];
  2715. }
  2716. var norm = Math.sqrt(num*num + den*den);
  2717. var sin_theta = num/norm;
  2718. var cos_theta = den/norm;
  2719. var rotation = [
  2720. [cos_theta, -sin_theta],
  2721. [sin_theta, cos_theta]
  2722. ];
  2723. return [rotation, scale];
  2724. }
  2725. /**
  2726. * LBF Random Forest data structure.
  2727. * @static
  2728. * @constructor
  2729. */
  2730. tracking.LBF.RandomForest = function(forestIndex){
  2731. this.maxNumTrees = tracking.LBF.RegressorData[forestIndex].max_numtrees;
  2732. this.landmarkNum = tracking.LBF.RegressorData[forestIndex].num_landmark;
  2733. this.maxDepth = tracking.LBF.RegressorData[forestIndex].max_depth;
  2734. this.stages = tracking.LBF.RegressorData[forestIndex].stages;
  2735. this.rfs = new Array(this.landmarkNum);
  2736. for(var i=0; i < this.landmarkNum; i++){
  2737. this.rfs[i] = new Array(this.maxNumTrees);
  2738. for(var j=0; j < this.maxNumTrees; j++){
  2739. this.rfs[i][j] = new tracking.LBF.Tree(forestIndex, i, j);
  2740. }
  2741. }
  2742. }
  2743. /**
  2744. * LBF Tree data structure.
  2745. * @static
  2746. * @constructor
  2747. */
  2748. tracking.LBF.Tree = function(forestIndex, landmarkIndex, treeIndex){
  2749. var data = tracking.LBF.RegressorData[forestIndex].landmarks[landmarkIndex][treeIndex];
  2750. this.maxDepth = data.max_depth;
  2751. this.maxNumNodes = data.max_numnodes;
  2752. this.nodes = data.nodes;
  2753. this.landmarkID = data.landmark_id;
  2754. this.numLeafnodes = data.num_leafnodes;
  2755. this.numNodes = data.num_nodes;
  2756. this.maxNumFeats = data.max_numfeats;
  2757. this.maxRadioRadius = data.max_radio_radius;
  2758. this.leafnodes = data.id_leafnodes;
  2759. }
  2760. }());