/**
 * @fileoverview Declarations for the hand tracking API.
 */
  4. /**
  5. * Version number of this package.
  6. */
  7. export const VERSION: string;
  8. /**
  9. * Represents pairs of (start,end) indexes so that we can connect landmarks
  10. * with lines to provide a skeleton when we draw the points.
  11. */
  12. export declare type LandmarkConnectionArray = Array<[number, number]>;
  13. /**
  14. * HandEvent.onHand returns an array of landmarks. This array provides the
  15. * edges to connect those landmarks to one another.
  16. */
  17. export declare const HAND_CONNECTIONS: LandmarkConnectionArray;
  18. /**
  19. * Represents a single normalized landmark.
  20. */
  21. export declare interface NormalizedLandmark {
  22. x: number;
  23. y: number;
  24. z: number;
  25. visibility?: number;
  26. }
  27. /**
  28. * We support several ways to get image inputs.
  29. */
  30. export type InputImage = HTMLVideoElement | HTMLImageElement | HTMLCanvasElement;
  31. /**
  32. * Legal inputs.
  33. */
  34. export interface InputMap {
  35. image: InputImage;
  36. }
  37. /**
  38. * One list of landmarks.
  39. */
  40. export type NormalizedLandmarkList = NormalizedLandmark[];
  41. /**
  42. * Multiple lists of landmarks.
  43. */
  44. export type NormalizedLandmarkListList = NormalizedLandmarkList[];
  45. /**
  46. * Represents a single landmark (not normalized).
  47. */
  48. export interface Landmark extends NormalizedLandmark {}
  49. /**
  50. * Detected points are returned as a collection of landmarks.
  51. */
  52. export type LandmarkList = Landmark[];
  53. /**
  54. * Detected points are returned as a collection of landmarks.
  55. */
  56. export type LandmarkListList = LandmarkList[];
  57. /**
  58. * GpuBuffers should all be compatible with Canvas' `drawImage`
  59. */
  60. type GpuBuffer = HTMLCanvasElement|HTMLImageElement|ImageBitmap;
  61. /**
  62. * The descriptiong of the hand represented by the corresponding landmarks.
  63. */
  64. export interface Handedness {
  65. /**
  66. * Index of the object as it appears in multiHandLandmarks.
  67. */
  68. index: number;
  69. /**
  70. * Confidence score between 0..1.
  71. */
  72. score: number;
  73. /**
  74. * Identifies which hand is detected at this index.
  75. */
  76. label: 'Right'|'Left';
  77. }
  78. /**
  79. * Possible results from Hands.
  80. */
  81. export interface Results {
  82. multiHandLandmarks: NormalizedLandmarkListList;
  83. multiHandWorldLandmarks: LandmarkListList;
  84. multiHandedness: Handedness[];
  85. image: GpuBuffer;
  86. }
  87. /**
  88. * Configurable options for Hands.
  89. */
  90. export interface Options {
  91. selfieMode?: boolean;
  92. maxNumHands?: number;
  93. modelComplexity?: 0|1;
  94. minDetectionConfidence?: number;
  95. minTrackingConfidence?: number;
  96. }
  97. /**
  98. * Listener for any results from Hands.
  99. */
  100. export type ResultsListener = (results: Results) => (Promise<void>|void);
  101. /**
  102. * Contains all of the setup options to drive the hand solution.
  103. */
  104. export interface HandsConfig {
  105. locateFile?: (path: string, prefix?: string) => string;
  106. }
  107. /**
  108. * Declares the interface of Hands.
  109. */
  110. declare interface HandsInterface {
  111. close(): Promise<void>;
  112. onResults(listener: ResultsListener): void;
  113. initialize(): Promise<void>;
  114. reset(): void;
  115. send(inputs: InputMap): Promise<void>;
  116. setOptions(options: Options): void;
  117. }
  118. /**
  119. * Encapsulates the entire Hand solution. All that is needed from the developer
  120. * is the source of the image data. The user will call `send`
  121. * repeatedly and if a hand is detected, then the user can receive callbacks
  122. * with this metadata.
  123. */
  124. export declare class Hands implements HandsInterface {
  125. constructor(config?: HandsConfig);
  126. /**
  127. * Shuts down the object. Call before creating a new instance.
  128. */
  129. close(): Promise<void>;
  130. /**
  131. * Registers a single callback that will carry any results that occur
  132. * after calling Send().
  133. */
  134. onResults(listener: ResultsListener): void;
  135. /**
  136. * Initializes the solution. This includes loading ML models and mediapipe
  137. * configurations, as well as setting up potential listeners for metadata. If
  138. * `initialize` is not called manually, then it will be called the first time
  139. * the developer calls `send`.
  140. */
  141. initialize(): Promise<void>;
  142. /**
  143. * Tells the graph to restart before the next frame is sent.
  144. */
  145. reset(): void;
  146. /**
  147. * Processes a single frame of data, which depends on the options sent to the
  148. * constructor.
  149. */
  150. send(inputs: InputMap): Promise<void>;
  151. /**
  152. * Adjusts options in the solution. This may trigger a graph reload the next
  153. * time the graph tries to run.
  154. */
  155. setOptions(options: Options): void;
  156. }