Best Python code snippet using Airtest
sift.py
Source: sift.py
...
    h_s, w_s = im_source.shape[:2]
    pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)
    dst = cv2.perspectiveTransform(pts, M)

    # trans numpy array to python list: [(a, b), (a1, b1), ...]
    def cal_rect_pts(dst):
        return [tuple(npt[0]) for npt in dst.astype(int).tolist()]
    pypts = cal_rect_pts(dst)

    # Note: although the 4 corner points may fall outside the source image,
    # the center point will not (because the mapping matrix M is linear).
    lt, br = pypts[0], pypts[2]
    middle_point = int((lt[0] + br[0]) / 2), int((lt[1] + br[1]) / 2)

    # The computed target rectangle may be flipped, so make sure the mapped
    # "top-left" corner really is the top-left point in the image:
    x_min, x_max = min(lt[0], br[0]), max(lt[0], br[0])
    y_min, y_max = min(lt[1], br[1]), max(lt[1], br[1])

    # The target rectangle may run out of bounds; clamp it to the borders:
    # left -> 0, right -> w_s - 1, top -> 0, bottom -> h_s - 1.
    # If x_min < 0, use 0; if x_max < 0, use 0.
    x_min, x_max = int(max(x_min, 0)), int(max(x_max, 0))
    # If x_min > w_s - 1, use w_s - 1; if x_max > w_s - 1, use w_s - 1.
    x_min, x_max = int(min(x_min, w_s - 1)), int(min(x_max, w_s - 1))
    # If y_min < 0, use 0; if y_max < 0, use 0.
    y_min, y_max = int(max(y_min, 0)), int(max(y_max, 0))
    # If y_min > h_s - 1, use h_s - 1; if y_max > h_s - 1, use h_s - 1.
    y_min, y_max = int(min(y_min, h_s - 1)), int(min(y_max, h_s - 1))

    # Corner points of the target region, ordered top-left, bottom-left,
    # bottom-right, top-right: (x_min,y_min)(x_min,y_max)(x_max,y_max)(x_max,y_min)
    pts = np.float32([[x_min, y_min], [x_min, y_max],
                      [x_max, y_max], [x_max, y_min]]).reshape(-1, 1, 2)
    pypts = cal_rect_pts(pts)
    return middle_point, pypts, [x_min, x_max, y_min, y_max, w, h]


def _two_good_points(pts_sch1, pts_sch2, pts_src1, pts_src2, im_search, im_source):
    """Return the recognition result for two pairs of matched keypoints."""
    # First compute the center point (coordinates in im_source):
    middle_point = [int((pts_src1[0] + pts_src2[0]) / 2), int((pts_src1[1] + pts_src2[1]) / 2)]
    pypts = []
    # If the keypoints share an x or y coordinate (in either src or sch), the
    # target rectangle cannot be computed; return the same result as good == 1:
    if pts_sch1[0] == pts_sch2[0] or pts_sch1[1] == pts_sch2[1] or pts_src1[0] == pts_src2[0] or pts_src1[1] == pts_src2[1]:
        confidence = ONE_POINT_CONFI
        one_match = generate_result(middle_point, pypts, confidence)
        return one_match
    # Compute the x/y scale factors (x_scale, y_scale) and expand the target
    # region out from the middle point (note: cast integers to float before dividing!):
    h, w = im_search.shape[:2]
    h_s, w_s = im_source.shape[:2]
...
sift_test.py
Source: sift_test.py
...
    h_s, w_s = im_source.shape[:2]
    pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)
    dst = cv2.perspectiveTransform(pts, M)

    # trans numpy array to python list: [(a, b), (a1, b1), ...]
    def cal_rect_pts(dst):
        return [tuple(npt[0]) for npt in dst.astype(int).tolist()]
    pypts = cal_rect_pts(dst)

    # Note: although the 4 corner points may fall outside the source image,
    # the center point will not (because the mapping matrix M is linear).
    lt, br = pypts[0], pypts[2]
    middle_point = int((lt[0] + br[0]) / 2), int((lt[1] + br[1]) / 2)

    # The computed target rectangle may be flipped, so make sure the mapped
    # "top-left" corner really is the top-left point in the image:
    x_min, x_max = min(lt[0], br[0]), max(lt[0], br[0])
    y_min, y_max = min(lt[1], br[1]), max(lt[1], br[1])

    # The target rectangle may run out of bounds; clamp it to the borders:
    # left -> 0, right -> w_s - 1, top -> 0, bottom -> h_s - 1.
    # If x_min < 0, use 0; if x_max < 0, use 0.
    x_min, x_max = int(max(x_min, 0)), int(max(x_max, 0))
    # If x_min > w_s - 1, use w_s - 1; if x_max > w_s - 1, use w_s - 1.
    x_min, x_max = int(min(x_min, w_s - 1)), int(min(x_max, w_s - 1))
    # If y_min < 0, use 0; if y_max < 0, use 0.
    y_min, y_max = int(max(y_min, 0)), int(max(y_max, 0))
    # If y_min > h_s - 1, use h_s - 1; if y_max > h_s - 1, use h_s - 1.
    y_min, y_max = int(min(y_min, h_s - 1)), int(min(y_max, h_s - 1))

    # Corner points of the target region, ordered top-left, bottom-left,
    # bottom-right, top-right: (x_min,y_min)(x_min,y_max)(x_max,y_max)(x_max,y_min)
    pts = np.float32([[x_min, y_min], [x_min, y_max],
                      [x_max, y_max], [x_max, y_min]]).reshape(-1, 1, 2)
    pypts = cal_rect_pts(pts)
    return middle_point, pypts, [x_min, x_max, y_min, y_max, w, h]


# With >= 4 matched point pairs, use the homography mapping to locate the
# target region and compute the confidence from it:
middle_point, pypts, w_h_range = _many_good_pts(im_source, im_search, kp_sch, kp_src, good)
print(middle_point)
print(pypts)
print(w_h_range)
# best_match = generate_result(middle_point, pypts, confidence)
#
# print("[sift] result=%s" % (best_match))
# matchesMask = [[0, 0] for i in range(len(matches))]
# coff = 0.2
# for i, (m, n) in enumerate(matches):
#     if m.distance < coff * n.distance:
#         matchesMask[i] = [1, 0]
...
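The test above assumes kp_sch, kp_src and good already exist before _many_good_pts is called. One common way to build them, hinted at by the commented-out ratio-test block (which uses a stricter 0.2 coefficient), is SIFT detection followed by Lowe's ratio test. The sketch below is an illustration, not Airtest's actual matching code; get_good_matches and the 0.75 ratio are assumptions.

import cv2

def get_good_matches(im_search, im_source, ratio=0.75):
    """Hypothetical helper: detect SIFT keypoints and keep ratio-test matches."""
    detector = cv2.SIFT_create()  # available in opencv-python >= 4.4
    kp_sch, des_sch = detector.detectAndCompute(im_search, None)
    kp_src, des_src = detector.detectAndCompute(im_source, None)
    # Two nearest neighbours per descriptor, then Lowe's ratio test.
    matches = cv2.BFMatcher(cv2.NORM_L2).knnMatch(des_sch, des_src, k=2)
    good = [m for m, n in matches if m.distance < ratio * n.distance]
    return kp_sch, kp_src, good

With at least four entries in good, cv2.findHomography can then estimate the matrix M that the snippet projects through cv2.perspectiveTransform.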
keypoint.py
Source: keypoint.py
...
        -1, 1, 2
    )
    dst = cv2.perspectiveTransform(pts, M)

    # trans numpy array to python list: [(a, b), (a1, b1), ...]
    def cal_rect_pts(dst):
        return [tuple(npt[0]) for npt in dst.astype(int).tolist()]
    pypts = cal_rect_pts(dst)

    # Note: although the 4 corner points may fall outside the source image,
    # the center point will not (because the mapping matrix M is linear).
    lt, br = pypts[0], pypts[2]
    middlePoint = int((lt[0] + br[0]) / 2), int((lt[1] + br[1]) / 2)

    # The computed target rectangle may be flipped, so make sure the mapped
    # "top-left" corner really is the top-left point in the image:
    xMin, xMax = min(lt[0], br[0]), max(lt[0], br[0])
    yMin, yMax = min(lt[1], br[1]), max(lt[1], br[1])

    # The target rectangle may run out of bounds; clamp it to the borders:
    # left -> 0, right -> w_s - 1, top -> 0, bottom -> h_s - 1.
    # If xMin < 0, use 0; if xMax < 0, use 0.
    xMin, xMax = int(max(xMin, 0)), int(max(xMax, 0))
    # If xMin > w_s - 1, use w_s - 1; if xMax > w_s - 1, use w_s - 1.
    xMin, xMax = int(min(xMin, w_s - 1)), int(min(xMax, w_s - 1))
    # If yMin < 0, use 0; if yMax < 0, use 0.
    yMin, yMax = int(max(yMin, 0)), int(max(yMax, 0))
    # If yMin > h_s - 1, use h_s - 1; if yMax > h_s - 1, use h_s - 1.
    yMin, yMax = int(min(yMin, h_s - 1)), int(min(yMax, h_s - 1))

    # Corner points of the target region, ordered top-left, bottom-left,
    # bottom-right, top-right: (xMin,yMin)(xMin,yMax)(xMax,yMax)(xMax,yMin)
    pts = np.float32(
        [[xMin, yMin], [xMin, yMax], [xMax, yMax], [xMax, yMin]]
    ).reshape(-1, 1, 2)
    pypts = cal_rect_pts(pts)
    return middlePoint, pypts, [xMin, xMax, yMin, yMax, w, h]


def KAZEMatching(filename, device, threshold=0.9, targetPos=5):
    # 1. Read the images.
    imSearch = cv2.imread("img/{name}".format(name=filename))
    imSource = device.screenshot(format="opencv")
    # 2. Get the keypoint sets and match keypoint pairs.
    kpSch, kpSrc, good = getKeyPoints(imSearch, imSource)
    # 3. Extract the recognized region from the matched pairs (good):
    originResult = handleGoodPoints(kpSch, kpSrc, good, imSearch, imSource)
    # In some special cases, None is returned directly as the match result:
    if originResult is None:
        return None
    else:
        middlePoint, pypts, posRange = originResult
...
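Once originResult is unpacked, posRange carries the clamped rectangle as [xMin, xMax, yMin, yMax, w, h]. A small, hypothetical debugging helper (not part of keypoint.py) could crop that region out of the device screenshot to verify the match visually:

import cv2

def cropMatch(imSource, posRange, outPath="match_debug.png"):
    """Hypothetical helper: save the matched region for visual inspection."""
    # posRange is assumed to be [xMin, xMax, yMin, yMax, w, h], as returned above.
    xMin, xMax, yMin, yMax = posRange[:4]
    region = imSource[yMin:yMax + 1, xMin:xMax + 1]
    cv2.imwrite(outPath, region)
    return region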