@@ -1021,7 +1021,6 @@ Blockly.Python.ai_lcd_showcarvas_set_display = function (block) {
Blockly.Python.definitions_['v831_import_camera'] = `from maix import camera`;
Blockly.Python.addVariable(`_canvas_x`, `_canvas_x = 0`, true)
Blockly.Python.addVariable(`_canvas_y`, `_canvas_y = 0`, true)
- Blockly.Python.addVariable(`canvas`, `canvas = ""`, true)
Blockly.Python.definitions_['v831_camera_set'] = `cameraSize = True
def CAMERATYPE():
global cameraSize
@@ -1047,7 +1046,7 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
- pass
+ ScreenOrientation = False
`
Blockly.Python.definitions_[`v831_display_show_canvas`] = `def v831_display_show_canvas(displayShow,_canvas_x,_canvas_y):
global ScreenOrientation,cameraSize
@@ -1210,10 +1209,10 @@ Blockly.Blocks['ai_lcd_textcarvas_new_line'] = {
.appendField(Blockly.Msg.image_process_text_dispalyStr);
this.appendValueInput("buttonUp")
.setCheck("String")
- .appendField(Blockly.Msg.image_process_text_buttonUp);
+ .appendField(Blockly.Msg.image_process_text_buttonDown);
this.appendValueInput("buttonDown")
.setCheck("String")
- .appendField(Blockly.Msg.image_process_text_buttonDown);
+ .appendField(Blockly.Msg.image_process_text_buttonUp);
this.appendValueInput("COLOR")
.setCheck("String")
.appendField(Blockly.Msg.image_process_text_color);
@@ -1274,7 +1273,7 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
- pass
+ ScreenOrientation = False
`
var _code = ""
@@ -1289,7 +1288,7 @@ key_D = BUTTON(7)`
Blockly.Python.definitions_["ai_lcd_textcarvas_new_line"] = `BUTTONNUMBER = 0
def textCanvasNewLine(canvasName,posax,posay,text,Num,buttonUp,buttonDown,Scale,Color,LineNum):
- global BUTTONNUMBER
+ global BUTTONNUMBER,ScreenOrientation
numText = len(text)//Num + 1
lineHeight = 240
if ScreenOrientation:
@@ -1405,6 +1404,7 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
+ ScreenOrientation = False
`
var _code = ""
if (color.charAt(0) == '#') {
@@ -1588,6 +1588,7 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
+ ScreenOrientation = False
`
if (color.charAt(0) == '#') {
var d = 0,
@@ -1705,6 +1706,7 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
+ ScreenOrientation = False
`
// console.log(xy.split(','),size.split(','))
let a = xy.split(',')
@@ -1811,6 +1813,7 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
+ ScreenOrientation = False
`
if (color.charAt(0) == '#') {
var d = 0,
@@ -1849,7 +1852,7 @@ Blockly.Blocks['ai_lcd_draw_image_on_canvas'] = {
this.appendDummyInput()
.appendField(new Blockly.FieldImage("blockly/media/lcd_draw_image.png", 45, 45, { alt: "*", flipRtl: "FALSE" }));
this.appendDummyInput()
- .appendField(Blockly.Msg.image_process_text_on_canvas + Blockly.Msg.image_process_draw_image_title)
+ .appendField(Blockly.Msg.image_process_text_on_canvas + Blockly.Msg.image_process_draw_image_title);
// .appendField(new Blockly.FieldVariable("canvas"), "varitem")
// .appendField();
this.appendValueInput("image_path")
@@ -1860,7 +1863,7 @@ Blockly.Blocks['ai_lcd_draw_image_on_canvas'] = {
.appendField(Blockly.Msg.OLCD_COORDINATE);
this.appendValueInput("alpha")
.setCheck(null)
- .appendField('图片透明度(范围0~1)');
+ .appendField(Blockly.Msg.image_process_draw_image_alpha);
this.setInputsInline(false);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
@@ -1871,7 +1874,6 @@ Blockly.Blocks['ai_lcd_draw_image_on_canvas'] = {
};
Blockly.Python['ai_lcd_draw_image_on_canvas'] = function (block) {
- // var variable_name = Blockly.Python.variableDB_.getName(block.getFieldValue('varitem'), Blockly.Variables.NAME_TYPE);
var value_image_path = Blockly.Python.valueToCode(block, 'image_path', Blockly.Python.ORDER_ATOMIC);
var value_scale_y = Blockly.Python.valueToCode(block, 'scale_y', Blockly.Python.ORDER_ATOMIC);
var alpha = Blockly.Python.valueToCode(block, 'alpha', Blockly.Python.ORDER_ATOMIC);
@@ -1904,11 +1906,11 @@ try:
canvas = image.new(size = (320, 240))
except:
canvas = image.new(size = (320, 240))
+ ScreenOrientation = False
`
// let alpha = Blockly.Python.valueToCode(block, 'alpha', Blockly.Python.ORDER_ATOMIC)
// TODO: Assemble Python into code variable.
- var code = 'canvas.draw_image(' + value_image_path + ',' + value_scale_y + ',alpha=' + alpha + ')\n' +
- '\n';
+ var code = 'canvas.draw_image(' + value_image_path + ',' + value_scale_y + ',alpha=' + alpha + ')\n';
return code;
};
@@ -1947,22 +1949,19 @@ Blockly.Python['v831_img_save'] = function (block) {
Blockly.Blocks.v831_img_text = {
init: function () {
- this.setHelpUrl(Blockly.Msg.TEXT_TEXT_HELPURL);
+ this.appendValueInput("save_path")
+ .setCheck(null)
+ .appendField(Blockly.Msg.read_img_path_to)
+ this.setOutput(true, null);
this.setColour("#f0983e");
- this.appendDummyInput().appendField(this.newQuote_(!0)).appendField(new Blockly.FieldTextInput, "TEXT").appendField(this.newQuote_(!1));
- this.setOutput(!0, "String");
- var a = this;
- this.setTooltip(function () {
- var b = a.getParent();
- return b && b.getInputsInline() && b.tooltip || Blockly.Msg.TEXT_TEXT_TOOLTIP
- })
- },
- newQuote_: function (a) {
- return new Blockly.FieldImage(a == this.RTL ? "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAwAAAAKCAQAAAAqJXdxAAAAqUlEQVQI1z3KvUpCcRiA8ef9E4JNHhI0aFEacm1o0BsI0Slx8wa8gLauoDnoBhq7DcfWhggONDmJJgqCPA7neJ7p934EOOKOnM8Q7PDElo/4x4lFb2DmuUjcUzS3URnGib9qaPNbuXvBO3sGPHJDRG6fGVdMSeWDP2q99FQdFrz26Gu5Tq7dFMzUvbXy8KXeAj57cOklgA+u1B5AoslLtGIHQMaCVnwDnADZIFIrXsoXrgAAAABJRU5ErkJggg==" : "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAwAAAAKCAQAAAAqJXdxAAAAn0lEQVQI1z3OMa5BURSF4f/cQhAKjUQhuQmFNwGJEUi0RKN5rU7FHKhpjEH3TEMtkdBSCY1EIv8r7nFX9e29V7EBAOvu7RPjwmWGH/VuF8CyN9/OAdvqIXYLvtRaNjx9mMTDyo+NjAN1HNcl9ZQ5oQMM3dgDUqDo1l8DzvwmtZN7mnD+PkmLa+4mhrxVA9fRowBWmVBhFy5gYEjKMfz9AylsaRRgGzvZAAAAAElFTkSuQmCC", 12, 12, '"')
+ this.setTooltip(Blockly.Msg.read_img_path_to);
+ this.setHelpUrl("");
}
};
-Blockly.Python.v831_img_text = function (a) {
- return [Blockly.Python.quote_(a.getFieldValue("TEXT")), Blockly.Python.ORDER_ATOMIC]
+Blockly.Python.v831_img_text = function (block) {
+ var save_path_location = Blockly.Python.valueToCode(block, 'save_path', Blockly.Python.ORDER_ATOMIC);
+ var code = 'image.open(' + save_path_location + ')';
+ return [code,Blockly.Python.ORDER_NONE];
}
Blockly.Blocks['v831_img_open'] = {
init: function () {
@@ -1984,11 +1983,7 @@ Blockly.Blocks['v831_img_open'] = {
};
Blockly.Python['v831_img_open'] = function (block) {
- // var variable_name = Blockly.Python.variableDB_.getName(block.getFieldValue('varitem'), Blockly.Variables.NAME_TYPE);
var save_path_location = Blockly.Python.valueToCode(block, 'save_path', Blockly.Python.ORDER_ATOMIC);
- // var compression_rate = block.getFieldValue('compression');
- // TODO: Assemble Python into code variable.
- // , quality=' + compression_rate + ' 图片压缩
var code = 'canvas = image.open(' + save_path_location + ')\n';
return code;
};
@@ -8732,7 +8727,7 @@ class Number_recognition:
number_recognition = Number_recognition()
`;
- var _code = `canvas = canvas.corp(48,8,224,224)
+ var _code = `canvas = canvas.crop(48,8,224,224)
number_recognition.digitalShowCanvas(canvas)
`
return _code;
@@ -9005,7 +9000,7 @@ Blockly.Python.ai_model_object_load = function (block) {
self.labels = ${a}
anchors = [5.4, 5.38, 1.65, 2.09, 0.8, 1.83, 2.45, 4.14, 0.46, 0.8]
self.m = nn.load(model, opt=options)
- self.yolo2_decoder = decoder.Yolo2(len(labels), anchors, net_in_size=(224,224), net_out_size=(7, 7))
+ self.yolo2_decoder = decoder.Yolo2(len(self.labels), anchors, net_in_size=(224,224), net_out_size=(7, 7))
self.OBJECTS = {
"OBJECT_START_X": [],
"OBJECT_START_Y": [],
@@ -9017,6 +9012,16 @@ Blockly.Python.ai_model_object_load = function (block) {
"OBJECT_RESULT": [],
}
def objectRecognize(self, canvas):
+ self.OBJECTS = {
+ "OBJECT_START_X": [],
+ "OBJECT_START_Y": [],
+ "OBJECT_CONFIDENCE": [],
+ "OBJECT_WIDTH": [],
+ "OBJECT_HEIGHT": [],
+ "OBJECT_CENTER_X": [],
+ "OBJECT_CENTER_Y": [],
+ "OBJECT_RESULT": [],
+ }
out = self.m.forward(canvas.tobytes(), quantize=True, layout="hwc")
BOXES, PROBS = self.yolo2_decoder.run(out, nms=0.3, threshold=0.3, img_size=(224,224))
if len(BOXES):
@@ -9028,13 +9033,14 @@ Blockly.Python.ai_model_object_load = function (block) {
self.OBJECTS["OBJECT_WIDTH"].append(BOXOBJ[0]+BOXOBJ[2])
self.OBJECTS["OBJECT_HEIGHT"].append(BOXOBJ[1]+ BOXOBJ[3])
self.OBJECTS["OBJECT_RESULT"].append(str(self.labels[PROBS[BOXESI][0]]))
- self.OBJECTS["OBJECT_CONFIDENCE"].append(round(PROBS[BOXESI][1][PROBS[BOXESI][0]]*100, 2)]]))
- self.OBJECTS["OBJECT_CENTER_X"].append(self.COLORS[BOXESI])
+ self.OBJECTS["OBJECT_CONFIDENCE"].append(round(PROBS[BOXESI][1][PROBS[BOXESI][0]]*100, 2))
+ self.OBJECTS["OBJECT_CENTER_X"].append(int((BOXOBJ[0] +BOXOBJ[0] + BOXOBJ[2])/2))
+ self.OBJECTS["OBJECT_CENTER_Y"].append(int((BOXOBJ[1] +BOXOBJ[1] + BOXOBJ[3])/2))
ObjectRecognition = ObjectRecognition()
`;
- var _code = `canvas = canvas.corp(48,8,224,224)
-ObjectRecognition.objectRecognize()
+ var _code = `canvas = canvas.crop(48,8,224,224)
+ObjectRecognition.objectRecognize(canvas)
`
return _code;
}
@@ -9335,14 +9341,14 @@ Blockly.Python.ai_model_Guesswork_load = function (block) {
self.MORAS["MORA_CONFIDENCE"].append(round(prob*100, 2))
self.MORAS["MORA_WIDTH"].append(box[0] + box[2])
self.MORAS["MORA_HEIGHT"].append(box[1] + box[3])
- self.MORAS["MORA_CENTER_X"].append(int((BOXOBJ[0] +BOXOBJ[0] + BOXOBJ[2])/2))
- self.MORAS["MORA_CENTER_Y"].append(int((BOXOBJ[1] + BOXOBJ[1] + BOXOBJ[3])/2))
+ self.MORAS["MORA_CENTER_X"].append(int((box[0] +box[0] + box[2])/2))
+ self.MORAS["MORA_CENTER_Y"].append(int((box[1] + box[1] + box[3])/2))
self.MORAS["MORA_RESULT"].append(self.labels[class_id])
self.draw_rectangle_with_title(input, box, disp_str)
Mora = Mora()
`;
- var _code = `canvas = canvas.corp(48,8,224,224)
+ var _code = `canvas = canvas.crop(48,8,224,224)
Mora.process(canvas)
`
return _code;
@@ -9901,15 +9907,15 @@ Blockly.Python['ai_model_card_recognition_load'] = function (block) {
for id in LP_number:
string_LP += self.chars[id]
# string_LP += str(id)
- input.draw_string(box[0], box[1], string_LP, color=(225,0,0))
- input.draw_rectangle(box[0], box[1], box[2], box[3],color=(255,0,0), thickness=-1)
+ input.draw_string(box[0], box[1], string_LP, color=(225,105,0))
+ input.draw_rectangle(box[0], box[1], box[2], box[3],color=(150,0,0), thickness=1)
self.LPRS["LPR_START_X"].append(box[0])
self.LPRS["LPR_START_Y"].append(box[1])
- self.LPRS["LPR_WIDTH"].append()
+ self.LPRS["LPR_WIDTH"].append(box[2]-box[0])
self.LPRS["LPR_HEIGHT"].append(box[3]-box[1])
self.LPRS["LPR_RESULT"].append(string_LP)
self.LPRS["LPR_CENTER_X"].append(int((box[2]+box[0])/2))
- self.LPRS["LPR_CENTER_Y"].append(int((box[3]+box[1])/2)))
+ self.LPRS["LPR_CENTER_Y"].append(int((box[3]+box[1])/2))
def get_card_data(self, landmark):
# landmark = i[4][:6]
@@ -9925,7 +9931,7 @@ Blockly.Python['ai_model_card_recognition_load'] = function (block) {
LPRCARD = LPR()
`;
// TODO: Assemble Python into code variable.
- var code = `canvas = canvas.corp(48,8,224,224)
+ var code = `canvas = canvas.crop(48,8,224,224)
LPRCARD.process(canvas)
`;
return code;
@@ -10306,10 +10312,7 @@ Blockly.Python['face_recognition_load'] = function (block) {
Blockly.Python.definitions_['v831_import_from_maix_nn'] = `from maix import nn`
Blockly.Python.definitions_['v831_import_from_maix_nn_face'] = `from maix.nn.app import face`
Blockly.Python.definitions_['v831_import_from_maix_nn_FaceRecognize'] = `from maix.nn.app.face import FaceRecognize`
-
- Blockly.Python.addVariable('score_threshold', 'score_threshold = 70', true);
Blockly.Python.addVariable('NUMBERPERSON', 'NUMBERPERSON = 0', true);
- Blockly.Python.addVariable('CLASSNAMEFACELIST', ``, true);
Blockly.Python.definitions_.import_Face_Recognizer = `class Face_Recognizer:
max_face_num = 4
detect_threshold = 0.5
@@ -10323,16 +10326,14 @@ Blockly.Python['face_recognition_load'] = function (block) {
input_size_fe = (128, 128, 3)
self.feature_len = 256
self.features = []
- print("-- load model:", model)
m = nn.load(model)
- print("-- load ok")
- print("-- load model:", model_fe)
m_fe = nn.load(model_fe)
- print("-- load ok")
self.isLoadRecoredFace = False
self.CLASSNAMEFACELIST = ${value_class_input}
self.recognizer = FaceRecognize(m, m_fe, self.feature_len, self.input_size, threshold, nms, max_face_num)
- print("-- init end")
+ self.FACERECOGNIZES = {
+ }
+ self.FACESRECOGNITONRESULT = []
def get_faces(self, img, std_img = False):
faces = self.recognizer.get_faces(img, std_img)
@@ -10397,11 +10398,11 @@ Blockly.Python['face_recognition_load'] = function (block) {
w = int(dis_size[0] - 4 - image.get_string_size(key_r)[0] * 1)
img.draw_string( w, 2 ,key_r , scale = 1, color = (255, 255, 255), thickness = 2)
def recognizeShowMessage(self, canvas):
- FACESRECOGNITONRESULT = FACERECGNIZER.get_faces(canvas)
- if len(FACESRECOGNITONRESULT):
- for FACESRECOGNITONRESULTI in FACESRECOGNITONRESULT:
+ self.FACESRECOGNITONRESULT = FACERECGNIZER.get_faces(canvas)
+ if len(self.FACESRECOGNITONRESULT):
+ for FACESRECOGNITONRESULTI in self.FACESRECOGNITONRESULT:
if self.isLoadRecoredFace:
- if round(FACERECGNIZER.recognize(FACESRECOGNITONRESULTI[3])[1], 2)]])>90
+ if int(FACERECGNIZER.recognize(FACESRECOGNITONRESULTI[3])[1])>85:
canvas.draw_rectangle(FACESRECOGNITONRESULTI[1][0],FACESRECOGNITONRESULTI[1][1], FACESRECOGNITONRESULTI[1][0]+FACESRECOGNITONRESULTI[1][2],FACESRECOGNITONRESULTI[1][1]+ FACESRECOGNITONRESULTI[1][3], color=(51,204,0), thickness=1)
canvas.draw_string(40,0, ("".join([str(x) for x in [FACERECGNIZER.recognize(FACESRECOGNITONRESULTI[3])[0], " : ", round(FACERECGNIZER.recognize(FACESRECOGNITONRESULTI[3])[1], 2)]])), scale = 1, color = (51,204,0) , thickness = 1)
else:
@@ -10437,7 +10438,7 @@ Blockly.Blocks['face_recognition_if_face_new'] = {
Blockly.Python['face_recognition_if_face_new'] = function (block) {
var statements_input = Blockly.Python.statementToCode(block, 'input');
- var code = `for FACESRECOGNITONRESULTI in FACESRECOGNITONRESULT:
+ var code = `for FACESRECOGNITONRESULTI in FACERECGNIZER.FACESRECOGNITONRESULT:
${statements_input}
`
return code;
@@ -10457,18 +10458,7 @@ Blockly.Blocks['face_recognition_if_face'] = {
};
Blockly.Python['face_recognition_if_face'] = function (block) {
- Blockly.Python.addFunction("recognizeShowMessage", `def recognizeShowMessage():
- global FACESRECOGNITONRESULT,isLoadRecoredFace
- if len(FACESRECOGNITONRESULT):
- for FACESRECOGNITONRESULTI in FACESRECOGNITONRESULT:
- if isLoadRecoredFace:
- canvas.draw_rectangle(FACESRECOGNITONRESULTI[1][0],FACESRECOGNITONRESULTI[1][1], FACESRECOGNITONRESULTI[1][0]+FACESRECOGNITONRESULTI[1][2],FACESRECOGNITONRESULTI[1][1]+ FACESRECOGNITONRESULTI[1][3], color=(51,204,0), thickness=1)
- canvas.draw_string(40,0, ("".join([str(x) for x in [FACERECGNIZER.recognize(FACESRECOGNITONRESULTI[3])[0], " : ", round(FACERECGNIZER.recognize(FACESRECOGNITONRESULTI[3])[1], 2)]])), scale = 1, color = (51,204,0) , thickness = 1)
- else:
- FACERECGNIZER.map_face(FACESRECOGNITONRESULTI[1],FACESRECOGNITONRESULTI[2],canvas)
- canvas.draw_rectangle(FACESRECOGNITONRESULTI[1][0],FACESRECOGNITONRESULTI[1][1], FACESRECOGNITONRESULTI[1][0]+FACESRECOGNITONRESULTI[1][2],FACESRECOGNITONRESULTI[1][1]+ FACESRECOGNITONRESULTI[1][3], color=(255,0,0), thickness=1)
-`)
- var code = `recognizeShowMessage()\n`;
+ var code = ``;
return code;
};
@@ -10648,7 +10638,7 @@ def _CREATE_TEXT_FILE_WITH_CONTENT(_path, _data, _sep):
except:
pass
canvas.draw_string(0,0, str("${saveFace}") + str(NUMBERPERSON), scale = 1, color = (51,204,0) , thickness = 1)
- v831_display_show_canvas(canvas)
+ v831_display_show_canvas(canvas,_canvas_x,_canvas_y)
time.sleep(1000 / 1000)
`;
return code;
@@ -10707,7 +10697,7 @@ key_D = BUTTON(7)
canvas.draw_rectangle(0,0, 0+320,0+ 20, color=(0,0,0), thickness=-1)
canvas.draw_string(40,0, ("".join([str(x) for x in ["${removeFace}", NUMBERPERSON, "${faceData}"]])), scale = 1, color = (255,255,255) , thickness = 1)
- v831_display_show_canvas(canvas)
+ v831_display_show_canvas(canvas,_canvas_x,_canvas_y)
time.sleep(1000 / 1000)
NUMBERPERSON = NUMBERPERSON - 1
FACERECGNIZER.remove_user(FACERECGNIZER.CLASSNAMEFACELIST[len(FACERECGNIZER)-1])
@@ -10950,7 +10940,8 @@ key_B = BUTTON(8)
key_C = BUTTON(13)
key_D = BUTTON(7)
`
- var code = `if key_${_type}.is_pressed() and SELFLEARNCOUNT < SELFLEARN.class_num:
+ var code = `canvas = canvas.crop(48, 8, 224, 224)
+if key_${_type}.is_pressed() and SELFLEARNCOUNT < SELFLEARN.class_num:
while not (key_${_type}.is_pressed() == False):
time.sleep(0.1)
SELFLEARN.classifier.add_class_img(canvas)
@@ -11052,7 +11043,8 @@ Blockly.Blocks['ai_model_self_learning_class_verification'] = {
};
Blockly.Python['ai_model_self_learning_class_verification'] = function (block) {
- var code = `SELFLEARNidx, SELFLEARNdistance = SELFLEARN.classifier.predict(canvas)
+ var code = `canvas = canvas.crop(48, 8, 224, 224)
+SELFLEARNidx, SELFLEARNdistance = SELFLEARN.classifier.predict(canvas)
`;
return code;
};
@@ -12006,49 +11998,6 @@ Blockly.Python['ai_model_class_start_load'] = function (block) {
return code;
}
-Blockly.Blocks['ai_model_class_gather'] = {
- init: function () {
- this.appendDummyInput()
- .appendField(Blockly.Msg.ai_model_class_gather);
- this.setPreviousStatement(true, null);
- this.setNextStatement(true, null);
- this.setColour("#ee783a");
- this.setTooltip(Blockly.Msg.ai_model_class_gather_TOOLTIP);
- this.setHelpUrl("");
- }
-};
-
-Blockly.Python['ai_model_class_gather'] = function (block) {
- var code = '' +
- '# capture img\n' +
- 'if train_status == 0:\n' +
- ' if key1.value() == 0 and last_btn_status == 1:\n' +
- ' #time.sleep_ms(30)\n' +
- ' #if key1.value() == 1 and (last_btn_status == 1) and (time.ticks_ms() - last_cap_time > 500):\n' +
- ' last_btn_status = 0\n' +
- ' last_cap_time = time.ticks_ms()\n' +
- ' if cap_num < class_num:\n' +
- ' index = classifier.add_class_img(' + localStorage.getItem("ai_model_class_start_load") + ')\n' +
- ' cap_num += 1\n' +
- ' elif cap_num < class_num + sample_num:\n' +
- ' index = classifier.add_sample_img(' + localStorage.getItem("ai_model_class_start_load") + ')\n' +
- ' cap_num += 1\n' +
- ' #else:\n' +
- ' # ' + localStorage.getItem("ai_model_class_start_load") + ' = draw_string(' + localStorage.getItem("ai_model_class_start_load") + ', 2, 200, "release boot key please", color=lcd.WHITE,scale=1, bg=lcd.RED)\n' +
- ' else:\n' +
- ' #time.sleep_ms(30)\n' +
- ' if key1.value() == 1 and (last_btn_status == 0):\n' +
- ' last_btn_status = 1\n' +
- ' if cap_num < class_num:\n' +
- ' ' + localStorage.getItem("ai_model_class_start_load") + '.draw_rectangle(-2,0, len("press right key to cap "+class_names[cap_num])*10+8 , 24, fill=True, color=lcd.RED)\n' +
- ' ' + localStorage.getItem("ai_model_class_start_load") + ' = lcd_draw_string(' + localStorage.getItem("ai_model_class_start_load") + ', 0, 2, "press right key to cap "+class_names[cap_num], color=lcd.WHITE,scale=1,mono_space=False)\n' +
- ' elif cap_num < class_num + sample_num:\n' +
- ' ' + localStorage.getItem("ai_model_class_start_load") + '.draw_rectangle(-2,0, len("right key to cap {} {}".format(class_names[(cap_num-class_num) // 5],5 if (cap_num-class_num+1) % 5 == 0 else (cap_num-class_num+1) % 5))*10+8 , 24, fill=True, color=lcd.RED)\n' +
- ' ' + localStorage.getItem("ai_model_class_start_load") + ' = lcd_draw_string(' + localStorage.getItem("ai_model_class_start_load") + ', 0, 2, "right key to cap {} {}".format(class_names[(cap_num-class_num) // 5],5 if (cap_num-class_num+1) % 5 == 0 else (cap_num-class_num+1) % 5), color=lcd.WHITE,scale=1,mono_space=False)\n' +
- '\n';
- return code;
-};
-
Blockly.Blocks['ai_model_class_data_load'] = {
init: function () {
this.appendDummyInput()
@@ -12294,8 +12243,6 @@ Blockly.Python['ai_model_customized_load_setup'] = function (block) {
"norm": [0.0078125, 0.0078125, 0.0078125],
}
def __init__(self):
- from maix import nn
- from maix.nn import decoder
self.model = nn.load(self.m, opt=self.options)
self.decoder = decoder.Yolo2(len(self.labels), self.anchors, net_in_size=(224, 224), net_out_size=(7, 7))
self.FACESYOLOS = {
@@ -12313,8 +12260,8 @@ Blockly.Python['ai_model_customized_load_setup'] = function (block) {
del self.decoder
def faceRecognizeShowResult(self,canvas):
- out = Yolo.model.forward(canvas, quantize=True, layout="hwc")
- BOXES, PROBS = Yolo.decoder.run(out, nms=0.3, threshold=0.3, img_size=(224, 224))
+ out = self.model.forward(canvas, quantize=True, layout="hwc")
+ BOXES, PROBS = self.decoder.run(out, nms=0.3, threshold=0.3, img_size=(224, 224))
self.FACESYOLOS = {
"FACE_START_X": [],
"FACE_START_Y": [],
canvas.draw_string((BOXOBJ[0]),(BOXOBJ[1]), ("".join([str(x) for x in [self.labels[PROBS[BOXESI][0]], ";", str(round((PROBS[BOXESI][1][PROBS[BOXESI][0]]*100), 2)) + str("%")]])), scale = 1, color = (255,0,0) , thickness = 1)
canvas.draw_rectangle((BOXOBJ[0]),(BOXOBJ[1]), (BOXOBJ[0])+(BOXOBJ[2]),(BOXOBJ[1])+ (BOXOBJ[3]), color=(255,0,0), thickness=1)
self.FACESYOLOS["FACE_START_X"].append(BOXOBJ[0])
- slef.FACESYOLOS["FACE_START_Y"].append(BOXOBJ[1])
+ self.FACESYOLOS["FACE_START_Y"].append(BOXOBJ[1])
self.FACESYOLOS["FACE_WIDTH"].append(BOXOBJ[0]+BOXOBJ[2])
self.FACESYOLOS["FACE_HEIGHT"].append(BOXOBJ[1]+BOXOBJ[3])
self.FACESYOLOS["FACE_CONFIDENCE"].append(round(PROBS[BOXESI][1][PROBS[BOXESI][0]]*100,2))
- self.FACESYOLOS["FACE_RESULT"].append(self.labels[PROBS[BOXESI][0])
+ self.FACESYOLOS["FACE_RESULT"].append(self.labels[PROBS[BOXESI][0]])
+ self.FACESYOLOS["FACE_CENTET_X"].append(int((BOXOBJ[0]+BOXOBJ[2])/2))
|
|
|
+ self.FACESYOLOS["FACE_CENTET_Y"].append(int((BOXOBJ[1]+BOXOBJ[3])/2))
|
|
|
|
|
|
|
|
|
FaceYolo = Yolo()
@@ -13598,7 +13547,8 @@ Blockly.Python['Numpy_Neural_Network_numpy_loading_model_fun'] = function (block
return result
`
// TODO: Assemble Python into code variable.
- var code = `GETNUMPYRESULT = numpyResult(canvas)
+ var code = `canvas = canvas.crop(48,8,224,224)
+GETNUMPYRESULT = numpyResult(canvas)
if GETNUMPYRESULT != "":
canvas.draw_string(0,0, (str(GETNUMPYRESULT)), scale = 3, color = (204,204,204) , thickness = 1)
else: