Best Python code snippet using playwright-python
testing_server.py
Source:testing_server.py
import traceback
import uuid
import socket
import logging
import os
import base64
import zlib
import gzip
import time
import datetime

from http import cookies
from http.server import BaseHTTPRequestHandler
from http.server import HTTPServer
from threading import Thread

import WebRequest


def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False):
    # print("Capturing expected headers:")
    # print(expected_headers)

    assert isinstance(expected_headers, dict), "expected_headers must be a dict. Passed a %s" % type(expected_headers)

    for key, val in expected_headers.items():
        assert isinstance(key, str)
        assert isinstance(val, str)

    cookie_key = uuid.uuid4().hex
    log = logging.getLogger("Main.TestServer")

    sucuri_reqs_1 = 0
    sucuri_reqs_2 = 0
    sucuri_reqs_3 = 0

    class MockServerRequestHandler(BaseHTTPRequestHandler):

        def log_message(self, format, *args):
            return

        def validate_headers(self):
            for key, value in expected_headers.items():
                if (is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Encoding':
                    # So PhantomJS monkeys with accept-encoding headers
                    # Just ignore that particular header, I guess.
                    pass

                # Selenium is fucking retarded, and I can't override the user-agent
                # and other assorted parameters via their API at all.
                elif (is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Language':
                    pass

                # Chromium is just broken completely for the accept header
                elif (is_annoying_pjs or is_selenium_garbage_chromium or is_chromium or skip_header_checks) and key == 'Accept':
                    pass

                elif not skip_header_checks:
                    v1 = value.replace(" ", "")
                    v2 = self.headers[key]
                    if v2 is None:
                        v2 = ""
                    v2 = v2.replace(" ", "")
                    test_context.assertEqual(v1, v2, msg="Mismatch in header parameter '{}' : expect: '{}' -> received:'{}' ({})".format(
                            key,
                            value,
                            self.headers[key],
                            {
                                'is_annoying_pjs' : is_annoying_pjs,
                                'is_chromium' : is_chromium,
                                'is_selenium_garbage_chromium' : is_selenium_garbage_chromium,
                                'skip_header_checks' : skip_header_checks,
                            },
                        )
                    )

        def _get_handler(self):
            # Process an HTTP GET request and return a response with an HTTP 200 status.
            # print("Path: ", self.path)
            # print("Headers: ", self.headers)
            # print("Cookie(s): ", self.headers.get_all('Cookie', failobj=[]))

            try:
                self.validate_headers()
            except Exception:
                self.send_response(500)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"Headers failed validation!")
                raise

            if self.path == "/":
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"Root OK?")
            elif self.path == "/favicon.ico":
                self.send_response(404)
                self.end_headers()
            elif self.path == "/raw-txt":
                self.send_response(200)
                self.send_header('Content-type', "text/plain")
                self.end_headers()
                self.wfile.write(b"Root OK?")
            elif self.path == "/html-decode":
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"Root OK?")
            elif self.path == "/html/real":
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"<html><body>Root OK?</body></html>")
            elif self.path == "/compressed/deflate":
                self.send_response(200)
                self.send_header('Content-Encoding', 'deflate')
                self.send_header('Content-type', "text/html")
                self.end_headers()
                inb = b"Root OK?"
                cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS)
                t1 = cobj.compress(inb) + cobj.flush()
                self.wfile.write(t1)
            elif self.path == "/compressed/gzip":
                self.send_response(200)
                self.send_header('Content-Encoding', 'gzip')
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(gzip.compress(b"Root OK?"))
            elif self.path == "/json/invalid":
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"LOLWAT")
            elif self.path == "/json/valid":
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b'{"oh" : "hai"}')
            elif self.path == "/json/no-coding":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b'{"oh" : "hai"}')
            elif self.path == "/filename/path-only.txt":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename/path-only-trailing-slash/":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename/content-disposition":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename=lolercoaster.txt")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/path-only.txt":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/content-disposition":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename=lolercoaster.txt")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/content-disposition-html-suffix":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename=lolercoaster.html")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/content-disposition-quotes-1":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename='lolercoaster.html'")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/content-disposition-quotes-2":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename=\'lolercoaster.html\'")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/content-disposition-quotes-spaces-1":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename='loler coaster.html'")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/content-disposition-quotes-spaces-2":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename=\"loler coaster.html\"")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/filename_mime/explicit-html-mime":
                self.send_response(200)
                self.send_header('Content-Disposition', "filename=lolercoaster.html")
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"LOLWAT?")
            elif self.path == "/redirect/bad-1":
                self.send_response(302)
                self.end_headers()
            elif self.path == "/redirect/bad-2":
                self.send_response(302)
                self.send_header('location', "bad-2")
                self.end_headers()
            elif self.path == "/redirect/bad-3":
                self.send_response(302)
                self.send_header('location', "gopher://www.google.com")
                self.end_headers()
            elif self.path == "/redirect/from-1":
                self.send_response(302)
                self.send_header('location', "to-1")
                self.end_headers()
            elif self.path == "/redirect/to-1":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"Redirect-To-1")
            elif self.path == "/redirect/from-2":
                self.send_response(302)
                self.send_header('uri', "to-2")
                self.end_headers()
            elif self.path == "/redirect/to-2":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"Redirect-To-2")
            elif self.path == "/redirect/from-3":
                self.send_response(302)
                newurl = "http://{}:{}".format(self.server.server_address[0], self.server.server_address[1])
                self.send_header('uri', newurl)
                self.end_headers()
            elif self.path == "/password/expect":
                # print("Password")
                # print(self.headers)
                self.send_response(200)
                self.end_headers()
                if not 'Authorization' in self.headers:
                    self.wfile.write(b"Password not sent!!")
                    return
                val = self.headers['Authorization']
                passval = val.split(" ")[-1]
                passstr = base64.b64decode(passval)
                if passstr == b'lol:wat':
                    self.wfile.write(b"Password Ok?")
                else:
                    self.wfile.write(b"Password Bad!")
            elif self.path == "/content/have-title":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"<html><head><title>I can haz title?</title></head><body>This page has a title!</body></html>")
            elif self.path == "/content/no-title":
                self.send_response(200)
                self.end_headers()
                self.wfile.write(b"<html><head></head><body>This page has no title. Sadface.jpg</body></html>")
            elif self.path == "/binary_ctnt":
                self.send_response(200)
                self.send_header('Content-type', "image/jpeg")
                self.end_headers()
                self.wfile.write(b"Binary!\x00\x01\x02\x03")
            elif self.path == "/binary_ctnt":
                self.send_response(200)
                self.send_header('Content-type', "image/jpeg")
                self.end_headers()
                self.wfile.write(b"Binary!\x00\x01\x02\x03")

            ##################################################################################################################################
            # Cookie stuff
            ##################################################################################################################################

            elif self.path == '/cookie_test':
                cook = cookies.SimpleCookie()
                cook['cookie_test_key'] = cookie_key
                cook['cookie_test_key']['path'] = "/"
                cook['cookie_test_key']['domain'] = ""
                expiration = datetime.datetime.now() + datetime.timedelta(days=30)
                cook['cookie_test_key']["expires"] = expiration.strftime("%a, %d-%b-%Y %H:%M:%S PST")
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString())
                self.end_headers()
                self.wfile.write(b"<html><body>CF Cookie Test</body></html>")

            elif self.path == '/cookie_require':
                if self.headers.get_all('Cookie', failobj=[]):
                    cook = self.headers.get_all('Cookie', failobj=[])[0]
                    cook_key, cook_value = cook.split("=", 1)
                    if cook_key == 'cookie_test_key' and cook_value == cookie_key:
                        self.send_response(200)
                        self.send_header('Content-type', "text/html")
                        self.end_headers()
                        self.wfile.write(b"<html><body>Cookie forwarded properly!</body></html>")
                        return
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(b"<html><body>Cookie is missing</body></html>")

            ##################################################################################################################################
            # Sucuri validation
            ##################################################################################################################################

            elif self.path == '/sucuri_shit_3':
                # I'd like to get this down to just 2 requests (cookie bounce, and fetch).
                # Doing that requires pulling html content out of chromium, though.
                # Annoying.
                nonlocal sucuri_reqs_3
                sucuri_reqs_3 += 1
                if sucuri_reqs_3 > 3:
                    raise RuntimeError("Too many requests to sucuri_shit_3 (%s)!" % sucuri_reqs_3)
                if self.headers.get_all('Cookie', failobj=[]):
                    cook = self.headers.get_all('Cookie', failobj=[])[0]
                    cook_key, cook_value = cook.split("=", 1)
                    if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478':
                        # if cook['']
                        self.send_response(200)
                        self.send_header('Content-type', "text/html")
                        self.end_headers()
                        self.wfile.write(b"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>")
                        return
                container_dir = os.path.dirname(__file__)
                fpath = os.path.join(container_dir, "waf_garbage", 'sucuri_garbage.html')
                with open(fpath, "rb") as fp:
                    plain_contents = fp.read()
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(plain_contents)

            elif self.path == '/sucuri_shit_2':
                # This particular path is the one we should already have a cookie for.
                # As such, we expect one request only
                nonlocal sucuri_reqs_2
                sucuri_reqs_2 += 1
                if sucuri_reqs_2 > 1:
                    raise RuntimeError("Too many requests to sucuri_shit_2 (%s)!" % sucuri_reqs_2)
                if self.headers.get_all('Cookie', failobj=[]):
                    cook = self.headers.get_all('Cookie', failobj=[])[0]
                    cook_key, cook_value = cook.split("=", 1)
                    if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478':
                        # if cook['']
                        self.send_response(200)
                        self.send_header('Content-type', "text/html")
                        self.end_headers()
                        self.wfile.write(b"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>")
                        return
                container_dir = os.path.dirname(__file__)
                fpath = os.path.join(container_dir, "waf_garbage", 'sucuri_garbage.html')
                with open(fpath, "rb") as fp:
                    plain_contents = fp.read()
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(plain_contents)

            elif self.path == '/sucuri_shit':
                nonlocal sucuri_reqs_1
                sucuri_reqs_1 += 1
                if sucuri_reqs_1 > 4:
                    raise RuntimeError("Too many requests to sucuri_shit (%s)!" % sucuri_reqs_1)
                # print("Fetch for ", self.path)
                # print("Cookies:", self.headers.get_all('Cookie', failobj=[]))
                if self.headers.get_all('Cookie', failobj=[]):
                    cook = self.headers.get_all('Cookie', failobj=[])[0]
                    cook_key, cook_value = cook.split("=", 1)
                    if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478':
                        # if cook['']
                        self.send_response(200)
                        self.send_header('Content-type', "text/html")
                        self.end_headers()
                        self.wfile.write(b"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>")
                        return
                container_dir = os.path.dirname(__file__)
                fpath = os.path.join(container_dir, "waf_garbage", 'sucuri_garbage.html')
                with open(fpath, "rb") as fp:
                    plain_contents = fp.read()
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.end_headers()
                self.wfile.write(plain_contents)

            ##################################################################################################################################
            # Cloudflare validation
            ##################################################################################################################################

            elif self.path == '/cloudflare_under_attack_shit_2':
                if self.headers.get_all('Cookie', failobj=[]):
                    cook = self.headers.get_all('Cookie', failobj=[])[0]
                    cook_key, cook_value = cook.split("=", 1)
                    if cook_key == 'cloudflare_validate_key' and cook_value == cookie_key:
                        # if cook['']
                        self.send_response(200)
                        self.send_header('Content-type', "text/html")
                        self.end_headers()
                        self.wfile.write(b"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>")
                        return
                container_dir = os.path.dirname(__file__)
                fpath = os.path.join(container_dir, "waf_garbage", 'cf_js_challenge_03_12_2018.html')
                with open(fpath, "rb") as fp:
                    plain_contents = fp.read()
                self.server_version = "cloudflare is garbage"
                self.send_response(503)
                self.send_header('Server', "cloudflare is garbage")
                self.send_header('Content-type','text/html')
                self.end_headers()
                self.wfile.write(plain_contents)

            elif self.path == '/cloudflare_under_attack_shit':
                if self.headers.get_all('Cookie', failobj=[]):
                    cook = self.headers.get_all('Cookie', failobj=[])[0]
                    cook_key, cook_value = cook.split("=", 1)
                    if cook_key == 'cloudflare_validate_key' and cook_value == cookie_key:
                        # if cook['']
                        self.send_response(200)
                        self.send_header('Content-type', "text/html")
                        self.end_headers()
                        self.wfile.write(b"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>")
                        return
                container_dir = os.path.dirname(__file__)
                fpath = os.path.join(container_dir, "waf_garbage", 'cf_js_challenge_03_12_2018.html')
                with open(fpath, "rb") as fp:
                    plain_contents = fp.read()
                self.server_version = "cloudflare is garbage"
                self.send_response(503)
                self.send_header('Server', "cloudflare is garbage")
                self.send_header('Content-type','text/html')
                self.end_headers()
                self.wfile.write(plain_contents)

            elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=1543827239.915-44n9IE20mS&jschl_answer=9.66734594':
                cook = cookies.SimpleCookie()
                cook['cloudflare_validate_key'] = cookie_key
                cook['cloudflare_validate_key']['path'] = "/"
                cook['cloudflare_validate_key']['domain'] = ""
                expiration = datetime.datetime.now() + datetime.timedelta(days=30)
                cook['cloudflare_validate_key']["expires"] = expiration.strftime("%a, %d-%b-%Y %H:%M:%S PST")
                self.send_response(200)
                self.send_header('Content-type', "text/html")
                self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString())
                self.end_headers()
                body = "<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>"
                self.wfile.write(body.encode("utf-8"))

            ##################################################################################################################################
            # Handle requests for an unknown path
            ##################################################################################################################################

            else:
                test_context.assertEqual(self.path, "This shouldn't happen!")

        def do_GET(self):
            # Process an HTTP GET request and return a response with an HTTP 200 status.
            log.info("Request for URL path: '%s'", self.path)
            # print("Headers: ", self.headers)
            # print("Cookie(s): ", self.headers.get_all('Cookie', failobj=[]))
            try:
                return self._get_handler()
            except Exception as e:
                log.error("Exception in handler!")
                for line in traceback.format_exc().split("\n"):
                    log.error(line)
                raise e

    return MockServerRequestHandler


def get_free_port():
    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
    s.bind(('localhost', 0))
    address, port = s.getsockname()
    s.close()
    return port


def start_server(assertion_class,
                 from_wg,
                 port_override = None,
                 is_chromium = None,
                 is_selenium_garbage_chromium = False,
                 is_annoying_pjs = False,
                 skip_header_checks = False
                 ):

    # Configure mock server.
    if port_override:
        mock_server_port = port_override
    else:
        mock_server_port = get_free_port()

    expected_headers = dict(from_wg.browserHeaders)
    print(from_wg)
    print(expected_headers)
    assert isinstance(expected_headers, dict)

    captured_server = capture_expected_headers(
        expected_headers = expected_headers,
        test_context = assertion_class,
        is_chromium = is_chromium,
        is_selenium_garbage_chromium = is_selenium_garbage_chromium,
        is_annoying_pjs = is_annoying_pjs,
        skip_header_checks = skip_header_checks
    )

    retries = 4
    for x in range(retries + 1):
        try:
            mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server)
            break
        except OSError:
            time.sleep(0.2)
            if x >= retries:
                raise

    # Start running mock server in a separate thread.
    # Daemon threads automatically shut down when the main process exits.
    mock_server_thread = Thread(target=mock_server.serve_forever)
    mock_server_thread.setDaemon(True)
    mock_server_thread.start()

    return mock_server_port, mock_server, mock_server_thread


if __name__ == '__main__':
    wg = WebRequest.WebGetRobust()
    srv = start_server(
        assertion_class = None,
        from_wg = wg,
        skip_header_checks = True)
    print("running server on port: ", srv)
    while 1:
        ...
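
For context, the mock server above is driven by handing it a WebRequest fetcher and then requesting one of its canned paths. The snippet below is a minimal usage sketch, not part of the original file: it assumes the WebRequest package is installed, that this file is importable as testing_server, and it skips header validation so a plain urllib client is accepted.

# Usage sketch (assumptions noted above); header checks are disabled, so no TestCase is needed.
import urllib.request

import WebRequest

from testing_server import start_server  # assumes the file is importable under this name

wg = WebRequest.WebGetRobust()
port, server, thread = start_server(
    assertion_class=None,        # unused when skip_header_checks=True
    from_wg=wg,
    skip_header_checks=True,
)
try:
    with urllib.request.urlopen("http://localhost:%s/html/real" % port) as resp:
        print(resp.status, resp.read())  # expect: 200 b'<html><body>Root OK?</body></html>'
finally:
    server.shutdown()                    # stops serve_forever() in the daemon thread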
winforms.py
Source:winforms.py
...
        logger.exception(e)
        return False
    finally:
        winreg.CloseKey(net_key)


def _is_chromium():
    def edge_build(key):
        try:
            windows_key = None
            if machine() == 'x86':
                windows_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\EdgeUpdate\Clients\\' + key)
            else:
                windows_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate\Clients\\' + key)

            build, _ = winreg.QueryValueEx(windows_key, 'pv')
            build = int(build.replace('.', '')[:6])
            return build
        except Exception as e:
            logger.debug(e)
        finally:
            winreg.CloseKey(windows_key)

        return 0

    try:
        net_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\NET Framework Setup\NDP\v4\Full')
        version, _ = winreg.QueryValueEx(net_key, 'Release')

        if version < 394802:  # .NET 4.6.2
            return False

        build_versions = [
            '{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}',  # runtime
            '{2CD8A007-E189-409D-A2C8-9AF4EF3C72AA}',  # beta
            '{0D50BFEC-CD6A-4F9A-964C-C7416E3ACB10}',  # dev
            '{65C35B14-6C1D-4122-AC46-7148CC9D6497}'   # canary
        ]

        for key in build_versions:
            build = edge_build(key)
            if build >= 860622:  # Webview2 86.0.622.0
                return True

    except Exception as e:
        logger.debug(e)
    finally:
        winreg.CloseKey(net_key)

    return False


is_cef = forced_gui_ == 'cef'
is_chromium = not is_cef and _is_chromium() and forced_gui_ not in ['mshtml', 'edgehtml']
is_edge = not is_chromium and _is_edge() and forced_gui_ != 'mshtml'

if is_cef:
    from . import cef as CEF
    IWebBrowserInterop = object
    logger.debug('Using WinForms / CEF')
    renderer = 'cef'
elif is_chromium:
    from . import edgechromium as Chromium
    IWebBrowserInterop = object
    logger.debug('Using WinForms / Chromium')
    renderer = 'edgechromium'
elif is_edge:
    from . import edgehtml as Edge
    IWebBrowserInterop = object
...
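
The detection above only picks a default renderer; pywebview also lets the caller request a backend explicitly. A minimal, hedged usage sketch (assuming pywebview is installed) that asks for the Edge Chromium / WebView2 renderer this _is_chromium() check probes for:

# Usage sketch: request the WebView2 backend explicitly; pywebview falls back
# to another renderer if WebView2 is not available on the machine.
import webview

window = webview.create_window("Hello", "https://example.com")
webview.start(gui="edgechromium")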
file_bug_test.py
Source:file_bug_test.py
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import unittest

import mock
import webapp2
import webtest

# Importing mock_oauth2_decorator before file_bug mocks out
# OAuth2Decorator usage in that file.
# pylint: disable=unused-import
from dashboard import mock_oauth2_decorator
# pylint: enable=unused-import

from dashboard import file_bug
from dashboard.common import testing_common
from dashboard.common import utils
from dashboard.models import anomaly
from dashboard.models import bug_label_patterns
from dashboard.models import sheriff


class MockIssueTrackerService(object):
  """A fake version of IssueTrackerService that saves call values."""

  bug_id = 12345
  new_bug_args = None
  new_bug_kwargs = None
  add_comment_args = None
  add_comment_kwargs = None

  def __init__(self, http=None):
    pass

  @classmethod
  def NewBug(cls, *args, **kwargs):
    cls.new_bug_args = args
    cls.new_bug_kwargs = kwargs
    return cls.bug_id

  @classmethod
  def AddBugComment(cls, *args, **kwargs):
    cls.add_comment_args = args
    cls.add_comment_kwargs = kwargs


class FileBugTest(testing_common.TestCase):

  def setUp(self):
    super(FileBugTest, self).setUp()
    app = webapp2.WSGIApplication([('/file_bug', file_bug.FileBugHandler)])
    self.testapp = webtest.TestApp(app)
    testing_common.SetSheriffDomains(['chromium.org'])
    testing_common.SetIsInternalUser('internal@chromium.org', True)
    testing_common.SetIsInternalUser('foo@chromium.org', False)
    self.SetCurrentUser('foo@chromium.org')
    # Add a fake issue tracker service that we can get call values from.
    file_bug.issue_tracker_service = mock.MagicMock()
    self.original_service = file_bug.issue_tracker_service.IssueTrackerService
    self.service = MockIssueTrackerService
    file_bug.issue_tracker_service.IssueTrackerService = self.service

  def tearDown(self):
    super(FileBugTest, self).tearDown()
    file_bug.issue_tracker_service.IssueTrackerService = self.original_service
    self.UnsetCurrentUser()

  def _AddSampleAlerts(self, is_chromium=True):
    """Adds sample data and returns a dict of rev to anomaly key."""
    # Add sample sheriff, masters, bots, and tests.
    sheriff_key = sheriff.Sheriff(
        id='Sheriff',
        labels=['Performance-Sheriff', 'Cr-Blink-Javascript']).put()
    testing_common.AddTests(['ChromiumPerf'], ['linux'], {
        'scrolling': {
            'first_paint': {},
            'mean_frame_time': {},
        }
    })
    test_path1 = 'ChromiumPerf/linux/scrolling/first_paint'
    test_path2 = 'ChromiumPerf/linux/scrolling/mean_frame_time'
    test_key1 = utils.TestKey(test_path1)
    test_key2 = utils.TestKey(test_path2)
    anomaly_key1 = self._AddAnomaly(111995, 112005, test_key1, sheriff_key)
    anomaly_key2 = self._AddAnomaly(112000, 112010, test_key2, sheriff_key)
    rows_1 = testing_common.AddRows(test_path1, [112005])
    rows_2 = testing_common.AddRows(test_path2, [112010])
    if is_chromium:
      rows_1[0].r_commit_pos = 112005
      rows_2[0].r_commit_pos = 112010
    return (anomaly_key1, anomaly_key2)

  def _AddSampleClankAlerts(self):
    """Adds sample data and returns a dict of rev to anomaly key.

    The biggest difference here is that the start/end revs aren't chromium
    commit positions. This tests the _MilestoneLabel function to make sure
    it will update the end_revision if r_commit_pos is found.
    """
    # Add sample sheriff, masters, bots, and tests. Doesn't need to be Clank.
    sheriff_key = sheriff.Sheriff(
        id='Sheriff',
        labels=['Performance-Sheriff', 'Cr-Blink-Javascript']).put()
    testing_common.AddTests(['ChromiumPerf'], ['linux'], {
        'scrolling': {
            'first_paint': {},
            'mean_frame_time': {},
        }
    })
    test_path1 = 'ChromiumPerf/linux/scrolling/first_paint'
    test_path2 = 'ChromiumPerf/linux/scrolling/mean_frame_time'
    test_key1 = utils.TestKey(test_path1)
    test_key2 = utils.TestKey(test_path2)
    anomaly_key1 = self._AddAnomaly(1476193324, 1476201840,
                                    test_key1, sheriff_key)
    anomaly_key2 = self._AddAnomaly(1476193320, 1476201870,
                                    test_key2, sheriff_key)
    rows_1 = testing_common.AddRows(test_path1, [1476201840])
    rows_2 = testing_common.AddRows(test_path2, [1476201870])
    # These will be the revisions used to determine label.
    rows_1[0].r_commit_pos = 112005
    rows_2[0].r_commit_pos = 112010
    return (anomaly_key1, anomaly_key2)

  def _AddAnomaly(self, start_rev, end_rev, test_key, sheriff_key):
    return anomaly.Anomaly(
        start_revision=start_rev, end_revision=end_rev, test=test_key,
        median_before_anomaly=100, median_after_anomaly=200,
        sheriff=sheriff_key).put()

  def testGet_WithNoKeys_ShowsError(self):
    # When a request is made and no keys parameter is given,
    # an error message is shown in the reply.
    response = self.testapp.get(
        '/file_bug?summary=s&description=d&finish=true')
    self.assertIn('<div class="error">', response.body)
    self.assertIn('No alerts specified', response.body)

  def testGet_WithNoFinish_ShowsForm(self):
    # When a GET request is sent with keys specified but the finish parameter
    # is not given, the response should contain a form for the sheriff to fill
    # in bug details (summary, description, etc).
    alert_keys = self._AddSampleAlerts()
    response = self.testapp.get(
        '/file_bug?summary=s&description=d&keys=%s' % alert_keys[0].urlsafe())
    self.assertEqual(1, len(response.html('form')))
    self.assertIn('<input name="cc" type="text" value="foo@chromium.org">',
                  str(response.html('form')[0]))

  def testInternalBugLabel(self):
    # If any of the alerts are marked as internal-only, which should happen
    # when the corresponding test is internal-only, then the create bug dialog
    # should suggest adding a Restrict-View-Google label.
    self.SetCurrentUser('internal@chromium.org')
    alert_keys = self._AddSampleAlerts()
    anomaly_entity = alert_keys[0].get()
    anomaly_entity.internal_only = True
    anomaly_entity.put()
    response = self.testapp.get(
        '/file_bug?summary=s&description=d&keys=%s' % alert_keys[0].urlsafe())
    self.assertIn('Restrict-View-Google', response.body)

  def testGet_SetsBugLabelsComponents(self):
    self.SetCurrentUser('internal@chromium.org')
    alert_keys = self._AddSampleAlerts()
    bug_label_patterns.AddBugLabelPattern('label1-foo', '*/*/*/first_paint')
    bug_label_patterns.AddBugLabelPattern('Cr-Performance-Blink',
                                          '*/*/*/mean_frame_time')
    response = self.testapp.get(
        '/file_bug?summary=s&description=d&keys=%s,%s' % (
            alert_keys[0].urlsafe(), alert_keys[1].urlsafe()))
    self.assertIn('label1-foo', response.body)
    self.assertIn('Performance>Blink', response.body)
    self.assertIn('Performance-Sheriff', response.body)
    self.assertIn('Blink>Javascript', response.body)

  @mock.patch(
      'google.appengine.api.app_identity.get_default_version_hostname',
      mock.MagicMock(return_value='chromeperf.appspot.com'))
  @mock.patch.object(
      file_bug.auto_bisect, 'StartNewBisectForBug',
      mock.MagicMock(return_value={'issue_id': 123, 'issue_url': 'foo.com'}))
  def _PostSampleBug(self, is_chromium=True, is_clankium=False):
    if is_clankium:
      alert_keys = self._AddSampleClankAlerts()
    else:
      alert_keys = self._AddSampleAlerts(is_chromium)
    response = self.testapp.post(
        '/file_bug',
        [
            ('keys', '%s,%s' % (alert_keys[0].urlsafe(),
                                alert_keys[1].urlsafe())),
            ('summary', 's'),
            ('description', 'd\n'),
            ('finish', 'true'),
            ('label', 'one'),
            ('label', 'two'),
            ('component', 'Foo>Bar'),
        ])
    return response

  @mock.patch.object(
      file_bug, '_GetAllCurrentVersionsFromOmahaProxy',
      mock.MagicMock(return_value=[]))
  @mock.patch.object(
      file_bug.auto_bisect, 'StartNewBisectForBug',
      mock.MagicMock(return_value={'issue_id': 123, 'issue_url': 'foo.com'}))
  def testGet_WithFinish_CreatesBug(self):
    # When a POST request is sent with keys specified and with the finish
    # parameter given, an issue will be created using the issue tracker
    # API, and the anomalies will be updated, and a response page will
    # be sent which indicates success.
    self.service.bug_id = 277761
    response = self._PostSampleBug()

    # The response page should have a bug number.
    self.assertIn('277761', response.body)

    # The anomaly entities should be updated.
    for anomaly_entity in anomaly.Anomaly.query().fetch():
      if anomaly_entity.end_revision in [112005, 112010]:
        self.assertEqual(277761, anomaly_entity.bug_id)
      else:
        self.assertIsNone(anomaly_entity.bug_id)

    # Two HTTP requests are made when filing a bug; only test 2nd request.
    comment = self.service.add_comment_args[1]
    self.assertIn(
        'https://chromeperf.appspot.com/group_report?bug_id=277761', comment)
    self.assertIn('https://chromeperf.appspot.com/group_report?keys=', comment)
    self.assertIn(
        '\n\n\nBot(s) for this bug\'s original alert(s):\n\nlinux', comment)

  @mock.patch.object(
      file_bug, '_GetAllCurrentVersionsFromOmahaProxy',
      mock.MagicMock(return_value=[
          {
              'versions': [
                  {'branch_base_position': '112000', 'current_version': '2.0'},
                  {'branch_base_position': '111990', 'current_version': '1.0'}
              ]
          }
      ]))
  @mock.patch.object(
      file_bug.auto_bisect, 'StartNewBisectForBug',
      mock.MagicMock(return_value={'issue_id': 123, 'issue_url': 'foo.com'}))
  def testGet_WithFinish_LabelsBugWithMilestone(self):
    # Here, we expect the bug to have the following end revisions:
    # [112005, 112010] and the milestones are M-1 for rev 111990 and
    # M-2 for 112000. Hence the expected behavior is to label the bug
    # M-2 since 111995 (lowest possible revision introducing regression)
    # is less than 112010 (revision for M-2).
    self._PostSampleBug()
    self.assertIn('M-2', self.service.new_bug_kwargs['labels'])

  @unittest.skip('Flaky; see #1555.')
  @mock.patch(
      'google.appengine.api.urlfetch.fetch',
      mock.MagicMock(return_value=testing_common.FakeResponseObject(
          200, json.dumps([
              {
                  'versions': [
                      {'branch_base_position': '111999',
                       'current_version': '3.0.1234.32'},
                      {'branch_base_position': '112000',
                       'current_version': '2.0'},
                      {'branch_base_position': '111990',
                       'current_version': '1.0'}
                  ]
              }
          ]))))
  def testGet_WithFinish_LabelsBugWithLowestMilestonePossible(self):
    # Here, we expect the bug to have the following start revisions:
    # [111995, 112005] and the milestones are M-1 for rev 111990, M-2
    # for 112000 and M-3 for 111999. Hence the expected behavior is to
    # label the bug M-2 since 111995 is less than 112000 (M-2) and 111999
    # (M-3) AND M-2 is lower than M-3.
    self._PostSampleBug()
    self.assertIn('M-2', self.service.new_bug_kwargs['labels'])

  @mock.patch.object(
      file_bug, '_GetAllCurrentVersionsFromOmahaProxy',
      mock.MagicMock(return_value=[
          {
              'versions': [
                  {'branch_base_position': '112000', 'current_version': '2.0'},
                  {'branch_base_position': '111990', 'current_version': '1.0'}
              ]
          }
      ]))
  @mock.patch.object(
      file_bug.auto_bisect, 'StartNewBisectForBug',
      mock.MagicMock(return_value={'issue_id': 123, 'issue_url': 'foo.com'}))
  def testGet_WithFinish_LabelsBugWithNoMilestoneBecauseNotChromium(self):
    # Here, we expect to return no Milestone label because the alerts do not
    # contain r_commit_pos (and therefore aren't chromium). Assuming
    # testGet_WithFinish_LabelsBugWithMilestone passes, M-2
    # would be the label that it would get if the alert was Chromium.
    self._PostSampleBug(is_chromium=False)
    labels = self.service.new_bug_kwargs['labels']
    self.assertEqual(0, len([x for x in labels if x.startswith(u'M-')]))

  @mock.patch.object(
      file_bug, '_GetAllCurrentVersionsFromOmahaProxy',
      mock.MagicMock(return_value=[
          {
              'versions': [
                  {'branch_base_position': '113000', 'current_version': '2.0'},
                  {'branch_base_position': '112000', 'current_version': '2.0'},
                  {'branch_base_position': '111990', 'current_version': '1.0'}
              ]
          }
      ]))
  @mock.patch.object(
      file_bug.auto_bisect, 'StartNewBisectForBug',
      mock.MagicMock(return_value={'issue_id': 123, 'issue_url': 'foo.com'}))
  def testGet_WithFinish_LabelsBugForClank(self):
    # Here, we expect to return M-2 even though the alert revisions aren't
    # even close to the branching points. We use r_commit_pos to determine
    # which revision to check. There are 3 branching points to ensure we are
    # actually changing the revision that is checked to r_commit_pos instead
    # of just displaying the highest one (previous behavior).
    self._PostSampleBug(is_clankium=True)
    self.assertIn('M-2', self.service.new_bug_kwargs['labels'])

  @mock.patch(
      'google.appengine.api.urlfetch.fetch',
      mock.MagicMock(return_value=testing_common.FakeResponseObject(
          200, '[]')))
  def testGet_WithFinish_SucceedsWithNoVersions(self):
    # Here, we test that we don't label the bug with an unexpected value when
    # there is no version information from omahaproxy (for whatever reason)
    self._PostSampleBug()
    labels = self.service.new_bug_kwargs['labels']
    self.assertEqual(0, len([x for x in labels if x.startswith(u'M-')]))

  @mock.patch(
      'google.appengine.api.urlfetch.fetch',
      mock.MagicMock(return_value=testing_common.FakeResponseObject(
          200, '[]')))
  def testGet_WithFinish_SucceedsWithComponents(self):
    # Here, we test that components are posted separately from labels.
    self._PostSampleBug()
    self.assertIn('Foo>Bar', self.service.new_bug_kwargs['components'])

  @mock.patch(
      'google.appengine.api.urlfetch.fetch',
      mock.MagicMock(return_value=testing_common.FakeResponseObject(
          200, json.dumps([
              {
                  'versions': [
                      {'branch_base_position': '0', 'current_version': '1.0'}
                  ]
              }
          ]))))
  def testGet_WithFinish_SucceedsWithRevisionOutOfRange(self):
    # Here, we test that we label the bug with the highest milestone when the
    # revision introducing regression is beyond all milestones in the list.
    self._PostSampleBug()
    self.assertIn('M-1', self.service.new_bug_kwargs['labels'])

  @mock.patch(
      'google.appengine.api.urlfetch.fetch',
      mock.MagicMock(return_value=testing_common.FakeResponseObject(
          200, json.dumps([
              {
                  'versions': [
                      {'branch_base_position': 'N/A', 'current_version': 'N/A'}
                  ]
              }
          ]))))
  @mock.patch('logging.warn')
  def testGet_WithFinish_SucceedsWithNAAndLogsWarning(self, mock_warn):
    self._PostSampleBug()
    labels = self.service.new_bug_kwargs['labels']
    self.assertEqual(0, len([x for x in labels if x.startswith(u'M-')]))
    self.assertEqual(1, mock_warn.call_count)


if __name__ == '__main__':
  ...
test_headful.py
Source:test_headful.py
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
from flaky import flaky


async def test_should_have_default_url_when_launching_browser(
    browser_type, launch_arguments, tmpdir
):
    browser_context = await browser_type.launch_persistent_context(
        tmpdir, **{**launch_arguments, "headless": False}
    )
    urls = [page.url for page in browser_context.pages]
    assert urls == ["about:blank"]
    await browser_context.close()


async def test_headless_should_be_able_to_read_cookies_written_by_headful(
    browser_type, launch_arguments, server, tmpdir, is_chromium, is_win
):
    if is_chromium and is_win:
        pytest.skip("see https://github.com/microsoft/playwright/issues/717")
        return
    # Write a cookie in headful chrome
    headful_context = await browser_type.launch_persistent_context(
        tmpdir, **{**launch_arguments, "headless": False}
    )
    headful_page = await headful_context.new_page()
    await headful_page.goto(server.EMPTY_PAGE)
    await headful_page.evaluate(
        """() => document.cookie = 'foo=true; expires=Fri, 31 Dec 9999 23:59:59 GMT'"""
    )
    await headful_context.close()
    # Read the cookie from headless chrome
    headless_context = await browser_type.launch_persistent_context(
        tmpdir, **{**launch_arguments, "headless": True}
    )
    headless_page = await headless_context.new_page()
    await headless_page.goto(server.EMPTY_PAGE)
    cookie = await headless_page.evaluate("() => document.cookie")
    await headless_context.close()
    # This might throw. See https://github.com/GoogleChrome/puppeteer/issues/2778
    assert cookie == "foo=true"


async def test_should_close_browser_with_beforeunload_page(
    browser_type, launch_arguments, server, tmpdir
):
    browser_context = await browser_type.launch_persistent_context(
        tmpdir, **{**launch_arguments, "headless": False}
    )
    page = await browser_context.new_page()
    await page.goto(server.PREFIX + "/beforeunload.html")
    # We have to interact with a page so that 'beforeunload' handlers
    # fire.
    await page.click("body")
    await browser_context.close()


async def test_should_not_crash_when_creating_second_context(
    browser_type, launch_arguments, server
):
    browser = await browser_type.launch(**{**launch_arguments, "headless": False})
    browser_context = await browser.new_context()
    await browser_context.new_page()
    await browser_context.close()
    browser_context = await browser.new_context()
    await browser_context.new_page()
    await browser_context.close()
    await browser.close()


async def test_should_click_background_tab(browser_type, launch_arguments, server):
    browser = await browser_type.launch(**{**launch_arguments, "headless": False})
    page = await browser.new_page()
    await page.set_content(
        f'<button>Hello</button><a target=_blank href="{server.EMPTY_PAGE}">empty.html</a>'
    )
    await page.click("a")
    await page.click("button")
    await browser.close()


async def test_should_close_browser_after_context_menu_was_triggered(
    browser_type, launch_arguments, server
):
    browser = await browser_type.launch(**{**launch_arguments, "headless": False})
    page = await browser.new_page()
    await page.goto(server.PREFIX + "/grid.html")
    await page.click("body", button="right")
    await browser.close()


async def test_should_not_block_third_party_cookies(
    browser_type, launch_arguments, server, is_chromium, is_firefox
):
    browser = await browser_type.launch(**{**launch_arguments, "headless": False})
    page = await browser.new_page()
    await page.goto(server.EMPTY_PAGE)
    await page.evaluate(
        """src => {
            let fulfill;
            const promise = new Promise(x => fulfill = x);
            const iframe = document.createElement('iframe');
            document.body.appendChild(iframe);
            iframe.onload = fulfill;
            iframe.src = src;
            return promise;
        }""",
        server.CROSS_PROCESS_PREFIX + "/grid.html",
    )
    document_cookie = await page.frames[1].evaluate(
        """() => {
            document.cookie = 'username=John Doe';
            return document.cookie;
        }"""
    )
    await page.wait_for_timeout(2000)
    allows_third_party = is_chromium or is_firefox
    assert document_cookie == ("username=John Doe" if allows_third_party else "")
    cookies = await page.context.cookies(server.CROSS_PROCESS_PREFIX + "/grid.html")
    if allows_third_party:
        assert cookies == [
            {
                "domain": "127.0.0.1",
                "expires": -1,
                "httpOnly": False,
                "name": "username",
                "path": "/",
                "sameSite": "None",
                "secure": False,
                "value": "John Doe",
            }
        ]
    else:
        assert cookies == []
    await browser.close()


@pytest.mark.skip_browser("webkit")
async def test_should_not_override_viewport_size_when_passed_null(
    browser_type, launch_arguments, server
):
    # Our WebKit embedder does not respect window features.
    browser = await browser_type.launch(**{**launch_arguments, "headless": False})
    context = await browser.new_context(no_viewport=True)
    page = await context.new_page()
    await page.goto(server.EMPTY_PAGE)
    async with page.expect_popup() as popup_info:
        await page.evaluate(
            """() => {
                const win = window.open(window.location.href, 'Title', 'toolbar=no,location=no,directories=no,status=no,menubar=no,scrollbars=yes,resizable=yes,width=600,height=300,top=0,left=0');
                win.resizeTo(500, 450);
            }"""
        )
    popup = await popup_info.value
    await popup.wait_for_load_state()
    await popup.wait_for_function(
        """() => window.outerWidth === 500 && window.outerHeight === 450"""
    )
    await context.close()
    await browser.close()


@flaky
async def test_page_bring_to_front_should_work(browser_type, launch_arguments):
    browser = await browser_type.launch(**{**launch_arguments, "headless": False})
    page1 = await browser.new_page()
    await page1.set_content("Page1")
    page2 = await browser.new_page()
    await page2.set_content("Page2")

    await page1.bring_to_front()
    assert await page1.evaluate("document.visibilityState") == "visible"
    assert await page2.evaluate("document.visibilityState") == "visible"

    await page2.bring_to_front()
    assert await page1.evaluate("document.visibilityState") == "visible"
    assert await page2.evaluate("document.visibilityState") == "visible"
...
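
The is_chromium, is_firefox, and is_win flags used in these tests are ordinary pytest fixtures defined in the project's own conftest.py rather than Playwright APIs. A rough, hypothetical equivalent built on the browser_name fixture that pytest-playwright provides could look like the sketch below (names mirror the tests above; the real definitions may differ).

# Hypothetical conftest.py sketch -- illustrative only, not the project's actual conftest.
import sys

import pytest


@pytest.fixture
def is_chromium(browser_name: str) -> bool:
    return browser_name == "chromium"


@pytest.fixture
def is_firefox(browser_name: str) -> bool:
    return browser_name == "firefox"


@pytest.fixture
def is_win() -> bool:
    return sys.platform == "win32"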
LambdaTest’s Playwright tutorial gives you a broader view of the Playwright automation framework, its distinctive features, and its use cases, with examples to deepen your understanding of Playwright testing. The tutorial offers end-to-end guidance, from installing the Playwright framework through best practices and advanced concepts.