Best Python code snippet using playwright-python
android.py
Source: android.py
...

    def device_data(self):
        return self.__data


    def _launch_server(self, command):
        sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        abort_event = self.__abort_event
        abort_event.clear()
        self_ip = self.__widgets['ip_list'].current_data[0]
        try:
            port = 10000
            while True:
                try:
                    sockobj.bind((self_ip, port))
                except socket.error:
                    port += 1
                    if port > 65535:
                        raise socket.error
                else:
                    break

            self_port = port
            with self.__lock:
                self.__ip_port = (self_ip, self_port)
            self.__password = password = random.randint(0, 65535)
            self.__iv = iv = get_random_bytes(16)
            self.__key = key = get_random_bytes(32)
            qr_string = json.dumps({
                "ip": self_ip,
                "port": self_port,
                "password": password,
                "aes": {
                    "iv": b64encode(iv).decode(),
                    "key": b64encode(key).decode()},
                "command": command})
            image = qrcode.make(qr_string).resize((self.qr_size, self.qr_size))
            self.__qr_image = ImageTk.PhotoImage(image=image)

            if self.__qr_id is None:
                self.__qr_id = self.__qr_canvas.canvas.create_image((0, 0), image=self.__qr_image, anchor='nw')
            else:
                self.__qr_canvas.canvas.itemconfig(self.__qr_id, image=self.__qr_image)
            self.__data_book.select(self._qr_tab)
        except Exception as err:
            sockobj.close()
            with self.__lock:
                self.__ip_port = None
            raise err

        self.__enable_transfer_widgets(False)
        self.__view_model.idle.set(False)

        # Launch the data transfer thread
        @self.root_node.thread_manager.new_thread_do
        def transfer():
            def show_head(datainfo, addr, read):
                mark = '=' if read else '*'
                direction = 'From' if read else 'To'
                wtext = self.__scrolled_text
                wtext.append_text(f'''
{mark*60}
{direction} Device
Device Info:
    Manufacturer: {datainfo["manufacturer"]}
    Model: {datainfo["model"]}
IP: {addr[0]}
{datetime.datetime.now().isoformat()}
{mark*60}''')


            def clear_qr_image():
                try:
                    self.__qr_canvas.canvas.delete(self.__qr_id)
                except tk.TclError:
                    pass
                self.__qr_id = None


            def generate_plugin_link(data, type_):
                wtext = self.__scrolled_text

                for plugin in _plugins[type_]:
                    if not plugin.test_data(data):
                        continue

                    @wtext.create_link_tag
                    def link_action(dumb, plugin=plugin):
                        plugin.action(data)

                    wtext.append_text(f'[{plugin.link_text}]', link_action)
                    wtext.append_text(' ')

            def recv_head(ih):
                exit_flag = ih.recv(1)
                if exit_flag != b'\x00':
                    # WaveSyn can abort a mission by sending a zero to itself.
                    return True, None, 0
                head_len = struct.unpack("!I", ih.recv(4))[0]
                head_json = ih.recv(head_len).decode("utf-8")
                head_obj = json.loads(head_json)
                if head_obj["password"] != self.__password:
                    return True, None, 0
                info_len = head_obj["info_len"]
                encrypted_info = ih.recv(info_len)
                decrypted_info = _decrypt_text(encrypted_info, key=self.__key, iv=self.__iv)
                datainfo = json.loads(decrypted_info)
                return False, datainfo, head_obj["data_len"]


            def recv_data(ih):
                data_list = []
                while True:
                    data = ih.recv(8192)
                    if not data:
                        break
                    data_list.append(data)
                return b''.join(data_list)


            try:
                sockobj.listen(2)
                sockobj.settimeout(0.5)
                abort_flag = False
                while True:
                    try:
                        conn, addr = sockobj.accept()
                    except socket.timeout:
                        if abort_event.is_set():
                            abort_flag = True
                            break
                    else:
                        break

                if abort_flag:
                    raise AbortException

                ih = InterruptHandler(conn, 0.5, lambda dumb: abort_event.is_set())
                # Always manipulate the GUI components in the main thread.
                self.root_node.thread_manager.main_thread_do(block=False)(clear_qr_image)

                exit_flag, datainfo, datalen = recv_head(ih)
                if exit_flag:
                    return

                if command['action'] == 'read':
                    data = None
                    # Loop receiving data
                    if command['source'] != 'storage':
                        # For file transfer mission,
                        # if the file is large, recv_data will be memory consuming.
                        data = recv_data(ih)
                    # End receiving data

                    if command['source'] in ('clipboard', 'location_sensor'):
                        # Store received data
                        text = _decrypt_text(data, key=self.__key, iv=self.__iv)
                        self.__data = {
                            "data": text,
                            "type": "text"}

                        if command['source'] == 'location_sensor':
                            pos = json.loads(text)
                            text = f'latitude={pos["latitude"]}, longitude={pos["longitude"]}'

                        @self.root_node.thread_manager.main_thread_do(block=False)
                        def show_text():
                            # Always manipulate the GUI components in the main thread!
                            scrolled_text = self.__scrolled_text
                            show_head(datainfo, addr=addr, read=True)
                            scrolled_text.append_text(f'\n\n{text}\n\n\n')

                            generate_plugin_link(data=text, type_='text')
                            if command['source'] == 'location_sensor':
                                generate_plugin_link(data=pos, type_='locationsensor')
                            scrolled_text.append_text('\n'*3)
                            scrolled_text.see('end')

                    elif command['source'] == 'gallery':
                        bio = BytesIO(data)
                        img = Image.open(bio)
                        @self.root_node.thread_manager.main_thread_do(block=False)
                        def show_image():
                            scrolled_text = self.__scrolled_text
                            show_head(datainfo, addr=addr, read=True)
                            scrolled_text.append_text('\n'*2)
                            pil_frame = PILImageFrame(
                                scrolled_text.text,
                                pil_image=img,
                                balloon=self.root_node.gui.balloon)
                            scrolled_text.text.window_create('end', window=pil_frame)
                            scrolled_text.append_text('\n'*4)
                            scrolled_text.see('end')

                    elif command['source'] == 'storage':
                        filename = Path(datainfo["filename"])
                        directory = Path(self.__save_file_dir)
                        path = directory/filename
                        if path.exists():
                            stem, ext = filename.stem, filename.suffix
                            for k in range(1, 10000):
                                filename = f'{stem}[{k}]{ext}'
                                path = directory/filename
                                if not path.exists():
                                    break
                        with TemporaryFile() as tf:
                            recvcnt = 0
                            self.root_node.thread_manager.main_thread_do(block=False)(lambda: self.__view_model.transfer_progress.set(0))
                            while True:
                                buf = ih.recv(65536)
                                if not buf:
                                    self.root_node.thread_manager.main_thread_do(block=False)(lambda: self.__view_model.transfer_progress.set(0))
                                    break
                                tf.write(buf)
                                recvcnt += len(buf)
                                self.root_node.thread_manager.main_thread_do(block=False)(lambda: self.__view_model.transfer_progress.set(int(recvcnt/datalen*100)))

                            tf.seek(0, 0)
                            with path.open('wb') as f:
                                buflen = 65536
                                aes = AES.new(self.__key, AES.MODE_CBC, iv=self.__iv)
                                last_buf = b""
                                while True:
                                    buf = tf.read(buflen)
                                    recvcnt += buflen
                                    decrypted_buf = aes.decrypt(last_buf)
                                    if not buf:
                                        decrypted_buf = Padding.unpad(decrypted_buf, block_size=16)
                                    f.write(decrypted_buf)
                                    if not buf:
                                        break
                                    else:
                                        last_buf = buf
                        @self.root_node.thread_manager.main_thread_do(block=False)
                        def show_info():
                            show_head(datainfo, addr=addr, read=True)
                            scrolled_text = self.__scrolled_text
                            scrolled_text.append_text(f'\n\n{str(path)}\n\n')
                            generate_plugin_link(data=str(path), type_='file')
                            scrolled_text.append_text('\n'*3)
                            scrolled_text.see('end')


                    @self.root_node.thread_manager.main_thread_do(block=False)
                    def on_finish():
                        # Always manipulate the GUI components in the main thread.
                        self.__data_book.select(self._data_tab)
                        if self.__on_finish:
                            self.__on_finish(self.device_data['data'])
                            self.__on_finish = None

                elif command['action'] == 'write':
                    if command['target'] == 'clipboard':
                        text = self.root_node.interfaces.os.clipboard.read()
                        data = {'data':text, 'type':'text'}
                        ih.send(json.dumps(data).encode('utf-8'))
                    elif command['target'].startswith('dir:'):
                        if command['source'] == 'clipboard:image':
                            from PIL import ImageGrab
                            image = ImageGrab.grabclipboard()
                            if not image:
                                raise TypeError('No image in clipboard.')
                            bio = BytesIO()
                            image.save(bio, format='png')
                            ih.send(bio.getvalue())
                            bio.close()
                        elif command['source'].startswith('storage'):
                            filename = Path(self.__send_filename)
                            filelen = filename.stat().st_size
                            sendcnt = 0
                            buflen = 65536
                            with open(filename, 'rb') as file_send:
                                while True:
                                    buf = file_send.read(buflen)
                                    if not buf:
                                        self.__view_model.transfer_progress.set(0)
                                        break
                                    ih.send(buf)
                                    sendcnt += len(buf)
                                    self.__view_model.transfer_progress.set(int(sendcnt/filelen*100))
                    @self.root_node.thread_manager.main_thread_do(block=False)
                    def on_finish():
                        self.__data_book.select(self._data_tab)
                        scrolled_text = self.__scrolled_text
                        show_head(datainfo, addr=addr, read=False)
                        scrolled_text.append_text('\n'*3)
                        scrolled_text.see('end')
            except AbortException:
                self.root_node.thread_manager.main_thread_do(block=False)(clear_qr_image)
            finally:
                sockobj.close()
                with self.__lock:
                    self.__ip_port = None
                @self.root_node.thread_manager.main_thread_do(block=False)
                def finished():
                    self.__enable_transfer_widgets(True)
                    self.__view_model.idle.set(True)


    @WaveSynScriptAPI
    def read_device_clipboard(self, on_finish):
        self._launch_server(command={'action':'read', 'source':'clipboard'})
        self.__on_finish = on_finish


    @WaveSynScriptAPI
    def pick_gallery_photo(self, on_finish):
        self._launch_server(command={'action':'read', 'source':'gallery'})
        self.__on_finish = on_finish


    @WaveSynScriptAPI
    def get_device_file(self, savein, on_finish):
        self.__save_file_dir = self.root_node.gui.dialogs.constant_handler_ASK_DIRECTORY(savein)
        self._launch_server(command={'action':'read', 'source':'storage'})


    @WaveSynScriptAPI
    def read_device_location(self, on_finish):
        self._launch_server(command={'action':'read', 'source':'location_sensor'})
        self.__on_finish = on_finish


    @WaveSynScriptAPI
    def write_device_clipboard(self):
        self._launch_server(command={'action':'write', 'source':'', 'target':'clipboard'})


    @WaveSynScriptAPI
    def send_clipboard_image_to_device(self):
        self._launch_server(command={
            'action':'write',
            'target':'dir:Download',
            'source':'clipboard:image',
            'name':f'clipboard_{int(datetime.datetime.now().timestamp())}.png'})


    @WaveSynScriptAPI
    def send_image_to_device(self, filename):
        filename = Path(self.root_node.gui.dialogs.constant_handler_ASK_OPEN_FILENAME(filename))
        self.__send_filename = filename
        self._launch_server(command={
            'action':'write',
            'target':'dir:Download',
            'source':'storage:image',
            'name':f'from_pc_{filename.name}'})


    @WaveSynScriptAPI
    def send_file_to_device(self, filename):
        filename = Path(self.root_node.gui.dialogs.constant_handler_ASK_OPEN_FILENAME(filename))
        self.__send_filename = filename
        self._launch_server(command={
            'action':'write',
            'target':'dir:Download',
            'source':'storage:file',
            'name':f'from_pc_{filename.name}'})


    @WaveSynScriptAPI
    def abort(self):
        self.__abort_event.set()


    def __on_abort(self):
        with code_printer():
            self.abort()

...
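The excerpt above only shows the receiving end of the transfer. As a rough illustration of the wire format that recv_head and recv_data appear to expect (a one-byte exit flag, a 4-byte big-endian head length, a plain-JSON head carrying the password and lengths, then the AES-CBC-encrypted info block and the payload), a hypothetical sender could look like the sketch below. The helper name, the PKCS#7 padding, and the parameter layout are assumptions inferred from the code above, not part of the original file.

    # Hypothetical counterpart to recv_head()/recv_data() in the snippet above.
    # Assumes PKCS#7 padding and AES-CBC with the key/iv published in the QR code.
    import json
    import socket
    import struct
    from Crypto.Cipher import AES
    from Crypto.Util import Padding

    def send_to_wavesyn(ip, port, password, key, iv, info: dict, payload: bytes):
        encrypted_info = AES.new(key, AES.MODE_CBC, iv=iv).encrypt(
            Padding.pad(json.dumps(info).encode("utf-8"), block_size=16))
        head = json.dumps({
            "password": password,            # compared against self.__password
            "info_len": len(encrypted_info),
            "data_len": len(payload),
        }).encode("utf-8")
        with socket.create_connection((ip, port)) as sock:
            sock.sendall(b"\x00")                       # exit flag: zero means real data follows
            sock.sendall(struct.pack("!I", len(head)))  # 4-byte big-endian head length
            sock.sendall(head)
            sock.sendall(encrypted_info)
            sock.sendall(payload)                       # already encrypted for clipboard/file sources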
test_load_balance.py
Source: test_load_balance.py
...
    assert lb.ttask == 0
    with raises(BalancerError) as e:
        lb._test_init_limit('ttask', 11, 1, 10)
    assert "must be lesser then" in str(e)
def test_launch_server(umax_lb, mocker):
    mocked_add_task_server = mocker.patch.object(umax_lb, "_add_task_server")
    assert umax_lb.server_id_count == 0
    assert len(umax_lb.servers_in_use) == 0
    assert umax_lb.umax == 2
    umax_lb._launch_server(1)
    assert umax_lb.server_id_count == 1
    assert len(umax_lb.servers_in_use) == 1
    assert "S-1" in umax_lb.servers_in_use
    assert mocked_add_task_server.call_count == 1
def test_launch_server_more_tasks(umax_lb, mocker):
    mocked_add_task_server = mocker.patch.object(umax_lb, "_add_task_server")
    assert umax_lb.server_id_count == 0
    assert len(umax_lb.servers_in_use) == 0
    assert umax_lb.umax == 2
    umax_lb._launch_server(2)
    assert umax_lb.server_id_count == 1
    assert len(umax_lb.servers_in_use) == 1
    assert "S-1" in umax_lb.servers_in_use
    assert mocked_add_task_server.call_count == 2
def test_launch_server_too_much_tasks(umax_lb):
    assert umax_lb.server_id_count == 0
    assert len(umax_lb.servers_in_use) == 0
    assert umax_lb.umax == 2
    with raises(BalancerError) as e:
        umax_lb._launch_server(3)
    assert "Invalid number of tasks" in str(e)
def test_add_task_server(umax_lb):
    umax_lb._launch_server(1)
    server_name = "S-1"
    assert server_name in umax_lb.servers_in_use
    assert len(umax_lb.servers_in_use[server_name]["tasks"]) == 1
    umax_lb._add_task_server(server_name)
    assert len(umax_lb.servers_in_use[server_name]["tasks"]) == 2
def test_add_task_server_invalid_server_name(umax_lb):
    assert len(umax_lb.servers_in_use) == 0
    with raises(BalancerError) as e:
        umax_lb._add_task_server("do_not_exist")
    assert "not found" in str(e)
def test_add_task_server_server_limit_reached(umax_lb):
    umax_lb._launch_server(2)
    server_name = "S-1"
    assert server_name in umax_lb.servers_in_use
    assert len(umax_lb.servers_in_use[server_name]["tasks"]) == 2
    with raises(BalancerError) as e:
        umax_lb._add_task_server(server_name)
    assert "can't start new task" in str(e)
def test_find_server_for_task(umax_lb):
    umax_lb._launch_server(1)
    assert umax_lb._find_server_for_task() == 'S-1'
def test_find_server_for_task_no_server_available(umax_lb):
    umax_lb._launch_server(2)
    assert umax_lb._find_server_for_task() is None
def test_add_new_clients_max_tasks(umax_lb, mocker):
    mocker_launch_server = mocker.patch.object(umax_lb, '_launch_server')
    mocker_find_server_for_task = mocker.patch.object(umax_lb, '_find_server_for_task')
    mocker_add_task_server = mocker.patch.object(umax_lb, '_add_task_server')
    umax_lb._add_new_clients(2)
    assert mocker_launch_server.called_once()
    assert mocker_find_server_for_task.call_count == 0
    assert mocker_add_task_server.call_count == 0
def test_add_new_clients_min_tasks_twice(umax_lb, mocker):
    mocker_launch_server = mocker.patch.object(umax_lb, '_launch_server')
    mocker_find_server_for_task = mocker.patch.object(umax_lb, '_find_server_for_task')
    mocker_add_task_server = mocker.patch.object(umax_lb, '_add_task_server')
    umax_lb._add_new_clients(1)
    assert mocker_launch_server.called_once_with(1)
    assert mocker_find_server_for_task.called_once()
    umax_lb._add_new_clients(1)
    assert mocker_add_task_server.called_once()
    assert mocker_find_server_for_task.call_count == 2
def test_remove_server(umax_lb):
    umax_lb._launch_server(1)
    server_name = "S-1"
    del umax_lb.servers_in_use[server_name]["tasks"]["T-1"]
    assert len(umax_lb.servers_in_use) == 1
    umax_lb._remove_server(server_name)
    assert len(umax_lb.servers_in_use) == 0
def test_remove_server_running_tasks(umax_lb):
    umax_lb._launch_server(1)
    with raises(BalancerError) as e:
        umax_lb._remove_server("S-1")
    assert "still has tasks running" in str(e)
def test_remove_server_unknown(umax_lb):
    with raises(BalancerError) as e:
        umax_lb._remove_server("S-1")
    assert "not found" in str(e)
def test_remove_task_server(umax_lb):
    umax_lb._launch_server(2)
    server_name = "S-1"
    assert len(umax_lb.servers_in_use[server_name]["tasks"]) == 2
    umax_lb._remove_task_server("T-1", server_name)
    assert len(umax_lb.servers_in_use[server_name]["tasks"]) == 1
def test_remove_task_server_invalid_server_name(umax_lb):
    umax_lb._launch_server(2)
    assert len(umax_lb.servers_in_use["S-1"]["tasks"]) == 2
    fake_server_name = "S-2"
    with raises(BalancerError) as e:
        umax_lb._remove_task_server("T-1", fake_server_name)
    assert f"Server {fake_server_name} not found" in str(e)
def test_remove_task_server_invalid_task_name(umax_lb):
    umax_lb._launch_server(2)
    server_name = "S-1"
    assert len(umax_lb.servers_in_use[server_name]["tasks"]) == 2
    fake_task_name = "S-2"
    with raises(BalancerError) as e:
        umax_lb._remove_task_server(fake_task_name, server_name)
    assert f"Task {fake_task_name} not found in server" in str(e)
def test_run_tick(umax_lb):
    umax_lb._add_new_clients(4)
    assert umax_lb.tick_servers_count == 0
    assert len(umax_lb.servers_in_use) == 2
    server1_name = "S-1"
    assert len(umax_lb.servers_in_use[server1_name]["tasks"]) == 2
    assert umax_lb.servers_in_use[server1_name]["tasks"]["T-1"] == umax_lb.ttask
    assert umax_lb.servers_in_use[server1_name]["tasks"]["T-2"] == umax_lb.ttask
...
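For orientation, the tests above imply a load-balancer object (the umax_lb fixture) with a servers_in_use mapping of server names such as "S-1" to a "tasks" dict, an umax capacity per server, and a _launch_server method that creates a server and delegates task creation to _add_task_server. The sketch below is only an illustration of that assumed interface, using the error messages the tests assert on; it is not the project's actual implementation.

    # Minimal sketch of the interface the tests above exercise (assumed, not the real code).
    class BalancerError(Exception):
        pass

    class LoadBalancer:
        def __init__(self, umax, ttask):
            self.umax = umax                  # max concurrent tasks per server
            self.ttask = ttask                # ticks a new task stays alive
            self.server_id_count = 0
            self.servers_in_use = {}          # "S-1" -> {"tasks": {"T-1": remaining_ticks}}

        def _launch_server(self, n_tasks):
            if not 1 <= n_tasks <= self.umax:
                raise BalancerError("Invalid number of tasks")
            self.server_id_count += 1
            name = f"S-{self.server_id_count}"
            self.servers_in_use[name] = {"tasks": {}}
            for _ in range(n_tasks):
                self._add_task_server(name)   # called once per requested task
            return name

        def _add_task_server(self, server_name):
            if server_name not in self.servers_in_use:
                raise BalancerError(f"Server {server_name} not found")
            tasks = self.servers_in_use[server_name]["tasks"]
            if len(tasks) >= self.umax:
                raise BalancerError(f"Server {server_name} can't start new task")
            tasks[f"T-{len(tasks) + 1}"] = self.ttask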
test_usage.py
Source: test_usage.py
...
    def setUp(self):
        super().setUp()
        self.server = None
        self.context = None
    def _launch_server(self, **kwargs):
        self.server = volatildap.LdapServer(**kwargs)
        self.server.start()
        self.context = {
            'dirname': self.server._tempdir.name,
            'pid': self.server._process.pid,
        }
    def tearDown(self):
        if self.server is not None:
            self.server.stop()
        self.assertServerStopped(self.context)
        super().tearDown()
    def assertServerStopped(self, context, max_delay=5):
        now = time.time()
        # Allow some time for proper shutdown
        while time.time() < now + max_delay and os.path.exists(context['dirname']):
            time.sleep(0.2)
        self.assertFalse(os.path.exists(context['dirname']))
        # Check that the process is no longer running.
        # We cannot rely solely on "the pid is no longer running", as it may
        # have been reused by the operating system.
        # If a process by that pid still exists, we'll check that we aren't its parent.
        try:
            stats = psutil.Process(context['pid'])
            ppid = stats.ppid()
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # Process has died / is not in our context: all is fine.
            return
        self.assertNotEqual(ppid, os.getpid())
class ReadWriteTests(LdapServerTestCase):
    data = {
        'ou=test': {
            'objectClass': ['organizationalUnit'],
            'ou': ['test'],
        },
    }
    def test_initial_data(self):
        self._launch_server(initial_data=self.data)
        entry = self.server.get('ou=test,dc=example,dc=org')
        self.assertEqual({
            'objectClass': [b'organizationalUnit'],
            'ou': [b'test'],
        }, entry)
    def test_post_start_add(self):
        self._launch_server()
        self.server.add(self.data)
        entry = self.server.get('ou=test,dc=example,dc=org')
        self.assertEqual({
            'objectClass': [b'organizationalUnit'],
            'ou': [b'test'],
        }, entry)
    def test_get_missing_entry(self):
        self._launch_server()
        with self.assertRaises(KeyError):
            self.server.get('ou=test,dc=example,dc=org')
    def test_clear_data(self):
        self._launch_server()
        self.server.add(self.data)
        self.assertIsNotNone(self.server.get('ou=test,dc=example,dc=org'))
        self.server.start()  # Actually a restart
        # Custom data has been removed
        with self.assertRaises(KeyError):
            self.server.get('ou=test,dc=example,dc=org')
        # Core data is still there
        self.assertIsNotNone(self.server.get('dc=example,dc=org'))
class ResetTests(LdapServerTestCase):
    data = {
        'ou=test': {
            'objectClass': ['organizationalUnit'],
            'ou': ['test'],
        },
    }
    def test_cleanup(self):
        self._launch_server(initial_data=self.data)
        extra = {
            'ou=subarea,ou=test': {
                'objectClass': ['organizationalUnit'],
            },
        }
        self.server.add(extra)
        entry = self.server.get('ou=subarea,ou=test,dc=example,dc=org')
        self.server.reset()
        # Extra data should have been removed
        self.assertRaises(KeyError, self.server.get, 'ou=subarea,ou=test,dc=example,dc=org')
        # Initial data should still be here
        entry = self.server.get('ou=test,dc=example,dc=org')
        self.assertEqual({
            'objectClass': [b'organizationalUnit'],
            'ou': [b'test'],
        }, entry)
class TLSTests(LdapServerTestCase):
    def test_connection(self):
        self._launch_server(tls_config=volatildap.LOCALHOST_TLS_CONFIG)
        self.assertEqual(self.server.uri[:8], 'ldaps://')
        entry = self.server.get('dc=example,dc=org')
        self.assertEqual([b'example'], entry['dc'])
class AutoCleanupTests(LdapServerTestCase):
    def test_stop(self):
        """Deleting the LdapServer object causes its cleanup."""
        self._launch_server()
class ControlTests(LdapServerTestCase):
    def setUp(self):
        super().setUp()
        self.proxy = None
    def tearDown(self):
        if self.proxy:
            self.proxy.stop()
        super().tearDown()
    def _launch_server(self, **kwargs):
        control_port = find_available_port()
        super()._launch_server(
            control_address=('localhost', control_port),
            **kwargs,
        )
        self.proxy = volatildap.ProxyServer('http://localhost:%d/' % control_port)
    def test_launch_control(self):
        self._launch_server()
        self.assertEqual(
            self.server.uri,
            self.proxy.uri,
        )
        self.assertEqual('dc=example,dc=org', self.proxy.suffix)
        self.assertEqual(self.server.rootdn, self.proxy.rootdn)
        self.assertEqual(self.server.rootpw, self.proxy.rootpw)
    def test_tls(self):
        """The server CA should be available through the proxy."""
        self._launch_server(tls_config=volatildap.LOCALHOST_TLS_CONFIG)
        self.assertEqual(self.proxy.uri[:8], 'ldaps://')
        self.assertIsNotNone(self.proxy.tls_config)
        self.assertIsNotNone(self.proxy.tls_config.root)
    def test_get(self):
        initial = {'ou=people': {
            'objectClass': ['organizationalUnit'],
            'ou': ['people'],
        }}
        self._launch_server(
            initial_data=initial,
        )
        entry = self.proxy.get('ou=people')
        self.assertEqual(
            {
                'objectClass': [b'organizationalUnit'],
                'ou': [b'people'],
            },
            entry,
        )
    def test_add(self):
        self._launch_server()
        data = {'ou=people': {
            'objectClass': ['organizationalUnit'],
            'ou': ['people'],
        }}
        self.proxy.add(data)
        entry = self.proxy.get('ou=people')
        self.assertEqual(
            {
                'objectClass': [b'organizationalUnit'],
                'ou': [b'people'],
            },
            entry,
        )
    def test_reset(self):
        self._launch_server()
        data = {'ou=people': {
            'objectClass': ['organizationalUnit'],
            'ou': ['people'],
        }}
        self.proxy.add(data)
        # Ensure the data is visible
        self.proxy.get('ou=people')
        self.proxy.reset()
        with self.assertRaises(KeyError):
...
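Outside a test case, the same volatildap API shown in these tests can be used directly. The sketch below is a minimal stand-alone usage example limited to the calls the tests above make (LdapServer, start, get, reset, stop, uri); the default suffix dc=example,dc=org matches the entries the tests query.

    # Stand-alone use of volatildap, mirroring the calls made in the tests above.
    import volatildap

    server = volatildap.LdapServer(initial_data={
        'ou=test': {
            'objectClass': ['organizationalUnit'],
            'ou': ['test'],
        },
    })
    server.start()                          # boots a throwaway slapd instance
    try:
        entry = server.get('ou=test,dc=example,dc=org')
        print(server.uri, entry['ou'])      # attribute values come back as bytes
        server.reset()                      # drops anything added after start()
    finally:
        server.stop()                       # removes the temporary directory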
tf_two_machines_local.py
Source: tf_two_machines_local.py
...
parser.add_argument("--receiver-ip", default='127.0.0.1')
args = parser.parse_args()
cluster_spec = {'chief': [args.sender_ip + ':32300'],
                'receiver': [args.receiver_ip + ':32301']}
def _launch_server(role):
    os.environ['TF_CONFIG'] = json.dumps(
        {'cluster': cluster_spec,
         'task': {'type': role, 'index': 0}})
    config = tf.estimator.RunConfig()
    return tf.train.Server(config.cluster_spec,
                           job_name=config.task_type,
                           task_index=config.task_id)
def run_launcher():
    import ncluster
    ncluster.util.assert_script_in_current_directory()

    if args.aws:
        ncluster.set_backend('aws')
    # use 4GB instance, 0.5GB not enough
    worker = ncluster.make_task(args.name, image_name=args.image,
                                instance_type='t3.medium')
    worker.upload(__file__)
    worker.upload('util.py')
    # kill python just for when tmux session reuse is on
    if not ncluster.running_locally():
        # on AWS probably running in conda DLAMI, switch into TF-enabled env
        worker._run_raw('killall python', ignore_errors=True)
        worker.run('source activate tensorflow_p36')
    ip_config = f'--sender-ip={worker.ip} --receiver-ip={worker.ip}'
    worker.run(f'python {__file__} --role=receiver {ip_config}',
               non_blocking=True)
    worker.switch_window(1)  # run in new tmux window
    if not ncluster.running_locally():
        worker.run('source activate tensorflow_p36')
    worker.run(
        f'python {__file__} --role=sender {ip_config} --iters={args.iters} --size-mb={args.size_mb} --shards={args.shards}')
    print(worker.read('out'))
def run_receiver():
    server = _launch_server('receiver')
    time.sleep(365 * 24 * 3600)
    del server
def run_sender():
    param_size = 250 * 1000 * args.size_mb // args.shards  # 1MB is 250k integers
    log = util.FileLogger('out')
    grads_array = []
    with tf.device('/job:chief/task:0'):
        # grads = tf.fill([param_size], 1.)
        for i in range(args.shards):
            grads = tf.Variable(tf.ones([param_size]))
            grads_array.append(grads)
    params_array = []
    add_op_array = []
    with tf.device('/job:receiver/task:0'):
        for i in range(args.shards):
            params = tf.Variable(tf.ones([param_size]))
            add_op = params.assign(grads_array[i]).op
            params_array.append(params)
            add_op_array.append(add_op)
        add_op = tf.group(*add_op_array)

    server = _launch_server('chief')
    sess = tf.Session(server.target)
    sess.run(tf.global_variables_initializer())
    # except Exception as e:
    #   # sometimes .run fails with .UnavailableError: OS Error
    #   log(f"initialization failed with {e}, retrying in 1 second")
    #   time.sleep(1)
    time_list = []
    for i in range(args.iters):
        start_time = time.perf_counter()
        sess.run(add_op)
        elapsed_time_ms = (time.perf_counter() - start_time) * 1000
        time_list.append(elapsed_time_ms)
        rate = args.size_mb / (elapsed_time_ms / 1000)
        log('%03d/%d sent %d MBs in %.1f ms: %.2f MB/second' % (
...
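Here _launch_server builds an in-process TensorFlow server by writing a cluster spec into the TF_CONFIG environment variable, letting tf.estimator.RunConfig parse it, and then constructing tf.train.Server from the parsed job name and task index. Roughly, the environment value it produces for the receiver role looks like the sketch below (the IP addresses depend on the --sender-ip/--receiver-ip flags).

    # Approximately what _launch_server('receiver') places in TF_CONFIG
    # before building the in-process server.
    import json, os

    os.environ['TF_CONFIG'] = json.dumps({
        'cluster': {'chief': ['127.0.0.1:32300'],
                    'receiver': ['127.0.0.1:32301']},
        'task': {'type': 'receiver', 'index': 0},
    })
    # tf.estimator.RunConfig() reads this variable, and tf.train.Server(...)
    # then joins the cluster as the named job/task.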
LambdaTest’s Playwright tutorial gives you a broad overview of the Playwright automation framework, its unique features, and its use cases, with examples to deepen your understanding of Playwright testing. The tutorial offers A-to-Z guidance, from installing the Playwright framework to best practices and advanced concepts.