kinesis_stream.py
Source:kinesis_stream.py
...
    results = dict()
    try:
        if not check_mode:
            results = (
                client.list_tags_for_stream(**params)['Tags']
            )
        else:
            results = [
                {
                    'Key': 'DryRunMode',
                    'Value': 'true'
                },
            ]
        success = True
    except botocore.exceptions.ClientError as e:
        err_msg = str(e)
    return success, err_msg, results

def find_stream(client, stream_name, check_mode=False):
    """Retrieve a Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): Name of the Kinesis stream.
    Kwargs:
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
    Returns:
        Tuple (bool, str, dict)
    """
    err_msg = ''
    success = False
    params = {
        'StreamName': stream_name,
    }
    results = dict()
    has_more_shards = True
    shards = list()
    try:
        if not check_mode:
            while has_more_shards:
                results = (
                    client.describe_stream(**params)['StreamDescription']
                )
                shards.extend(results.pop('Shards'))
                has_more_shards = results['HasMoreShards']
            results['Shards'] = shards
            results['ShardsCount'] = len(shards)
        else:
            results = {
                'HasMoreShards': True,
                'RetentionPeriodHours': 24,
                'StreamName': stream_name,
                'StreamARN': 'arn:aws:kinesis:east-side:123456789:stream/{0}'.format(stream_name),
                'StreamStatus': 'ACTIVE'
            }
        success = True
    except botocore.exceptions.ClientError as e:
        err_msg = str(e)
    return success, err_msg, results

def wait_for_status(client, stream_name, status, wait_timeout=300,
                    check_mode=False):
    """Wait for the status to change for a Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client
        stream_name (str): The name of the kinesis stream.
        status (str): The status to wait for.
            examples. status=available, status=deleted
    Kwargs:
        wait_timeout (int): Number of seconds to wait, until this timeout is reached.
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> wait_for_status(client, stream_name, 'ACTIVE', 300)
    Returns:
        Tuple (bool, str, dict)
    """
    polling_increment_secs = 5
    wait_timeout = time.time() + wait_timeout
    status_achieved = False
    stream = dict()
    err_msg = ""
    while wait_timeout > time.time():
        try:
            find_success, find_msg, stream = (
                find_stream(client, stream_name, check_mode=check_mode)
            )
            if check_mode:
                status_achieved = True
                break
            elif status != 'DELETING':
                if find_success and stream:
                    if stream.get('StreamStatus') == status:
                        status_achieved = True
                        break
            elif status == 'DELETING' and not check_mode:
                if not find_success:
                    status_achieved = True
                    break
            else:
                time.sleep(polling_increment_secs)
        except botocore.exceptions.ClientError as e:
            err_msg = str(e)
    if not status_achieved:
        err_msg = "Wait time out reached, while waiting for results"
    else:
        err_msg = "Status {0} achieved successfully".format(status)
    return status_achieved, err_msg, stream

def tags_action(client, stream_name, tags, action='create', check_mode=False):
    """Create or delete multiple tags from a Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        resource_id (str): The Amazon resource id.
        tags (list): List of dictionaries.
            examples.. [{Name: "", Values: [""]}]
    Kwargs:
        action (str): The action to perform.
            valid actions == create and delete
            default=create
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('ec2')
        >>> resource_id = 'pcx-123345678'
        >>> tags = {'env': 'development'}
        >>> update_tags(client, resource_id, tags)
        [True, '']
    Returns:
        List (bool, str)
    """
    success = False
    err_msg = ""
    params = {'StreamName': stream_name}
    try:
        if not check_mode:
            if action == 'create':
                params['Tags'] = tags
                client.add_tags_to_stream(**params)
                success = True
            elif action == 'delete':
                params['TagKeys'] = tags.keys()
                client.remove_tags_from_stream(**params)
                success = True
            else:
                err_msg = 'Invalid action {0}'.format(action)
        else:
            if action == 'create':
                success = True
            elif action == 'delete':
                success = True
            else:
                err_msg = 'Invalid action {0}'.format(action)
    except botocore.exceptions.ClientError as e:
        err_msg = str(e)
    return success, err_msg

def recreate_tags_from_list(list_of_tags):
    """Recreate tags from a list of tuples into the Amazon Tag format.
    Args:
        list_of_tags (list): List of tuples.
    Basic Usage:
        >>> list_of_tags = [('Env', 'Development')]
        >>> recreate_tags_from_list(list_of_tags)
        [
            {
                "Value": "Development",
                "Key": "Env"
            }
        ]
    Returns:
        List
    """
    tags = list()
    i = 0
    list_of_tags = list_of_tags
    for i in range(len(list_of_tags)):
        key_name = list_of_tags[i][0]
        key_val = list_of_tags[i][1]
        tags.append(
            {
                'Key': key_name,
                'Value': key_val
            }
        )
    return tags

def update_tags(client, stream_name, tags, check_mode=False):
    """Update tags for an amazon resource.
    Args:
        resource_id (str): The Amazon resource id.
        tags (dict): Dictionary of tags you want applied to the Kinesis stream.
    Kwargs:
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('ec2')
        >>> stream_name = 'test-stream'
        >>> tags = {'env': 'development'}
        >>> update_tags(client, stream_name, tags)
        [True, '']
    Return:
        Tuple (bool, str)
    """
    success = False
    changed = False
    err_msg = ''
    tag_success, tag_msg, current_tags = (
        get_tags(client, stream_name, check_mode=check_mode)
    )
    if current_tags:
        tags = make_tags_in_aws_format(tags)
        current_tags_set = (
            set(
                reduce(
                    lambda x, y: x + y,
                    [make_tags_in_proper_format(current_tags).items()]
                )
            )
        )
        new_tags_set = (
            set(
                reduce(
                    lambda x, y: x + y,
                    [make_tags_in_proper_format(tags).items()]
                )
            )
        )
        tags_to_delete = list(current_tags_set.difference(new_tags_set))
        tags_to_update = list(new_tags_set.difference(current_tags_set))
        if tags_to_delete:
            tags_to_delete = make_tags_in_proper_format(
                recreate_tags_from_list(tags_to_delete)
            )
            delete_success, delete_msg = (
                tags_action(
                    client, stream_name, tags_to_delete, action='delete',
                    check_mode=check_mode
                )
            )
            if not delete_success:
                return delete_success, changed, delete_msg
        if tags_to_update:
            tags = make_tags_in_proper_format(
                recreate_tags_from_list(tags_to_update)
            )
        else:
            return True, changed, 'Tags do not need to be updated'
    if tags:
        create_success, create_msg = (
            tags_action(
                client, stream_name, tags, action='create',
                check_mode=check_mode
            )
        )
        if create_success:
            changed = True
        return create_success, changed, create_msg
    return success, changed, err_msg

def stream_action(client, stream_name, shard_count=1, action='create',
                  timeout=300, check_mode=False):
    """Create or Delete an Amazon Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        shard_count (int): Number of shards this stream will use.
        action (str): The action to perform.
            valid actions == create and delete
            default=create
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> shard_count = 20
        >>> stream_action(client, stream_name, shard_count, action='create')
    Returns:
        List (bool, str)
    """
    success = False
    err_msg = ''
    params = {
        'StreamName': stream_name
    }
    try:
        if not check_mode:
            if action == 'create':
                params['ShardCount'] = shard_count
                client.create_stream(**params)
                success = True
            elif action == 'delete':
                client.delete_stream(**params)
                success = True
            else:
                err_msg = 'Invalid action {0}'.format(action)
        else:
            if action == 'create':
                success = True
            elif action == 'delete':
                success = True
            else:
                err_msg = 'Invalid action {0}'.format(action)
    except botocore.exceptions.ClientError as e:
        err_msg = str(e)
    return success, err_msg

def retention_action(client, stream_name, retention_period=24,
                     action='increase', check_mode=False):
    """Increase or decrease the retention of messages in the Kinesis stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        retention_period (int): This is how long messages will be kept before
            they are discarded. This can not be less than 24 hours.
        action (str): The action to perform.
            valid actions == increase and decrease
            default=increase
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> retention_period = 48
        >>> stream_action(client, stream_name, retention_period, action='create')
    Returns:
        Tuple (bool, str)
    """
    success = False
    err_msg = ''
    params = {
        'StreamName': stream_name
    }
    try:
        if not check_mode:
            if action == 'increase':
                params['RetentionPeriodHours'] = retention_period
                client.increase_stream_retention_period(**params)
                success = True
                err_msg = (
                    'Retention Period increased successfully to {0}'
                    .format(retention_period)
                )
            elif action == 'decrease':
                params['RetentionPeriodHours'] = retention_period
                client.decrease_stream_retention_period(**params)
                success = True
                err_msg = (
                    'Retention Period decreased successfully to {0}'
                    .format(retention_period)
                )
            else:
                err_msg = 'Invalid action {0}'.format(action)
        else:
            if action == 'increase':
                success = True
            elif action == 'decrease':
                success = True
            else:
                err_msg = 'Invalid action {0}'.format(action)
    except botocore.exceptions.ClientError as e:
        err_msg = str(e)
    return success, err_msg

def update(client, current_stream, stream_name, retention_period=None,
           tags=None, wait=False, wait_timeout=300, check_mode=False):
    """Update an Amazon Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        retention_period (int): This is how long messages will be kept before
            they are discarded. This can not be less than 24 hours.
        tags (dict): The tags you want applied.
        wait (bool): Wait until Stream is ACTIVE.
            default=False
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> current_stream = {
            'HasMoreShards': True,
            'RetentionPeriodHours': 24,
            'StreamName': 'test-stream',
            'StreamARN': 'arn:aws:kinesis:us-west-2:123456789:stream/test-stream',
            'StreamStatus': 'ACTIVE'
        }
        >>> stream_name = 'test-stream'
        >>> retention_period = 48
        >>> stream_action(client, current_stream, stream_name,
                          retention_period, action='create')
    Returns:
        Tuple (bool, bool, str)
    """
    success = True
    changed = False
    err_msg = ''
    if retention_period:
        if wait:
            wait_success, wait_msg, current_stream = (
                wait_for_status(
                    client, stream_name, 'ACTIVE', wait_timeout,
                    check_mode=check_mode
                )
            )
            if not wait_success:
                return wait_success, False, wait_msg
        if current_stream['StreamStatus'] == 'ACTIVE':
            retention_changed = False
            if retention_period > current_stream['RetentionPeriodHours']:
                retention_changed, retention_msg = (
                    retention_action(
                        client, stream_name, retention_period, action='increase',
                        check_mode=check_mode
                    )
                )
            elif retention_period < current_stream['RetentionPeriodHours']:
                retention_changed, retention_msg = (
                    retention_action(
                        client, stream_name, retention_period, action='decrease',
                        check_mode=check_mode
                    )
                )
            elif retention_period == current_stream['RetentionPeriodHours']:
                retention_msg = (
                    'Retention {0} is the same as {1}'
                    .format(
                        retention_period,
                        current_stream['RetentionPeriodHours']
                    )
                )
                success = True
            if retention_changed:
                success = True
                changed = True
            err_msg = retention_msg
            if changed and wait:
                wait_success, wait_msg, current_stream = (
                    wait_for_status(
                        client, stream_name, 'ACTIVE', wait_timeout,
                        check_mode=check_mode
                    )
                )
                if not wait_success:
                    return wait_success, False, wait_msg
            elif changed and not wait:
                stream_found, stream_msg, current_stream = (
                    find_stream(client, stream_name, check_mode=check_mode)
                )
                if stream_found:
                    if current_stream['StreamStatus'] != 'ACTIVE':
                        err_msg = (
                            'Retention Period for {0} is in the process of updating'
                            .format(stream_name)
                        )
                        return success, changed, err_msg
        else:
            err_msg = (
                'StreamStatus has to be ACTIVE in order to modify the retention period. Current status is {0}'
                .format(current_stream['StreamStatus'])
            )
            return success, changed, err_msg
    if tags:
        _, _, err_msg = (
            update_tags(client, stream_name, tags, check_mode=check_mode)
        )
    if wait:
        success, err_msg, _ = (
            wait_for_status(
                client, stream_name, 'ACTIVE', wait_timeout,
                check_mode=check_mode
            )
        )
    if success and changed:
        err_msg = 'Kinesis Stream {0} updated successfully.'.format(stream_name)
    elif success and not changed:
        err_msg = 'Kinesis Stream {0} did not change.'.format(stream_name)
    return success, changed, err_msg

def create_stream(client, stream_name, number_of_shards=1, retention_period=None,
                  tags=None, wait=False, wait_timeout=300, check_mode=False):
    """Create an Amazon Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        number_of_shards (int): Number of shards this stream will use.
            default=1
        retention_period (int): Can not be less than 24 hours
            default=None
        tags (dict): The tags you want applied.
            default=None
        wait (bool): Wait until Stream is ACTIVE.
            default=False
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> number_of_shards = 10
        >>> tags = {'env': 'test'}
        >>> create_stream(client, stream_name, number_of_shards, tags=tags)
    Returns:
        Tuple (bool, bool, str, dict)
    """
    success = False
    changed = False
    err_msg = ''
    results = dict()
    stream_found, stream_msg, current_stream = (
        find_stream(client, stream_name, check_mode=check_mode)
    )
    if stream_found and not check_mode:
        if current_stream['ShardsCount'] != number_of_shards:
            err_msg = 'Can not change the number of shards in a Kinesis Stream'
            return success, changed, err_msg, results
    if stream_found and current_stream['StreamStatus'] == 'DELETING' and wait:
        wait_success, wait_msg, current_stream = (
            wait_for_status(
                client, stream_name, 'ACTIVE', wait_timeout,
                check_mode=check_mode
            )
        )
    if stream_found and current_stream['StreamStatus'] != 'DELETING':
        success, changed, err_msg = update(
            client, current_stream, stream_name, retention_period, tags,
            wait, wait_timeout, check_mode=check_mode
        )
    else:
        create_success, create_msg = (
            stream_action(
                client, stream_name, number_of_shards, action='create',
                check_mode=check_mode
            )
        )
        if create_success:
            changed = True
            if wait:
                wait_success, wait_msg, results = (
                    wait_for_status(
                        client, stream_name, 'ACTIVE', wait_timeout,
                        check_mode=check_mode
                    )
                )
                err_msg = (
                    'Kinesis Stream {0} is in the process of being created'
                    .format(stream_name)
                )
                if not wait_success:
                    return wait_success, True, wait_msg, results
            else:
                err_msg = (
                    'Kinesis Stream {0} created successfully'
                    .format(stream_name)
                )
            if tags:
                changed, err_msg = (
                    tags_action(
                        client, stream_name, tags, action='create',
                        check_mode=check_mode
                    )
                )
                if changed:
                    success = True
                if not success:
                    return success, changed, err_msg, results
            stream_found, stream_msg, current_stream = (
                find_stream(client, stream_name, check_mode=check_mode)
            )
            if retention_period and current_stream['StreamStatus'] == 'ACTIVE':
                changed, err_msg = (
                    retention_action(
                        client, stream_name, retention_period, action='increase',
                        check_mode=check_mode
                    )
                )
                if changed:
                    success = True
                if not success:
                    return success, changed, err_msg, results
            else:
                err_msg = (
                    'StreamStatus has to be ACTIVE in order to modify the retention period. Current status is {0}'
                    .format(current_stream['StreamStatus'])
                )
                success = create_success
                changed = True
    if success:
        _, _, results = (
            find_stream(client, stream_name, check_mode=check_mode)
        )
        _, _, current_tags = (
            get_tags(client, stream_name, check_mode=check_mode)
        )
        if current_tags and not check_mode:
            current_tags = make_tags_in_proper_format(current_tags)
            results['Tags'] = current_tags
        elif check_mode and tags:
            results['Tags'] = tags
        else:
            results['Tags'] = dict()
        results = convert_to_lower(results)
    return success, changed, err_msg, results

def delete_stream(client, stream_name, wait=False, wait_timeout=300,
                  check_mode=False):
    """Delete an Amazon Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        wait (bool): Wait until Stream is ACTIVE.
            default=False
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> delete_stream(client, stream_name)
    Returns:
        Tuple (bool, bool, str, dict)
    """
    success = False
    changed = False
    err_msg = ''
    results = dict()
    stream_found, stream_msg, current_stream = (
        find_stream(client, stream_name, check_mode=check_mode)
    )
    if stream_found:
        success, err_msg = (
            stream_action(
                client, stream_name, action='delete', check_mode=check_mode
            )
        )
        if success:
            changed = True
            if wait:
                success, err_msg, results = (
                    wait_for_status(
                        client, stream_name, 'DELETING', wait_timeout,
                        check_mode=check_mode
                    )
                )
                err_msg = 'Stream {0} deleted successfully'.format(stream_name)
                if not success:
                    return success, True, err_msg, results
            else:
                err_msg = (
                    'Stream {0} is in the process of being deleted'
                    .format(stream_name)
                )
    else:
        success = True
        changed = False
        err_msg = 'Stream {0} does not exist'.format(stream_name)
    return success, changed, err_msg, results

def main():
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(default=None, required=True),
            shards=dict(default=None, required=False, type='int'),
            retention_period=dict(default=None, required=False, type='int'),
            tags=dict(default=None, required=False, type='dict', aliases=['resource_tags']),
            wait=dict(default=True, required=False, type='bool'),
            wait_timeout=dict(default=300, required=False, type='int'),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    retention_period = module.params.get('retention_period')
    stream_name = module.params.get('name')
    shards = module.params.get('shards')
    state = module.params.get('state')
    tags = module.params.get('tags')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    if state == 'present' and not shards:
        module.fail_json(msg='Shards is required when state == present.')
    if retention_period:
        if retention_period < 24:
            module.fail_json(msg='Retention period can not be less than 24 hours.')
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required.')
    check_mode = module.check_mode
    try:
        region, ec2_url, aws_connect_kwargs = (
            get_aws_connection_info(module, boto3=True)
        )
        client = (
            boto3_conn(
                module, conn_type='client', resource='kinesis',
                region=region, endpoint=ec2_url, **aws_connect_kwargs
            )
        )
    except botocore.exceptions.ClientError as e:
        err_msg = 'Boto3 Client Error - {0}'.format(str(e.msg))
        module.fail_json(
            success=False, changed=False, result={}, msg=err_msg
        )
    if state == 'present':
        success, changed, err_msg, results = (
            create_stream(
                client, stream_name, shards, retention_period, tags,
                wait, wait_timeout, check_mode
            )
        )
    elif state == 'absent':
        success, changed, err_msg, results = (
            delete_stream(client, stream_name, wait, wait_timeout, check_mode)
        )
    if success:
        module.exit_json(
            success=success, changed=changed, msg=err_msg, **results
        )
    else:
        module.fail_json(
            success=success, changed=changed, msg=err_msg, result=results
        )

# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
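The functions above are thin wrappers around a boto3 Kinesis client, so they can be exercised outside Ansible. A minimal sketch, assuming AWS credentials are configured and using an illustrative stream name and region (not part of the module itself); with check_mode=True, find_stream returns canned data and makes no AWS call:

import boto3

client = boto3.client('kinesis', region_name='us-west-2')
# Dry run: skips describe_stream and returns the mocked StreamDescription.
success, err_msg, stream = find_stream(client, 'test-stream', check_mode=True)
print(success, stream['StreamStatus'])
# A real lookup would require the stream to exist and valid credentials:
# success, err_msg, stream = find_stream(client, 'test-stream')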
teststreams.py
Source:teststreams.py
# -*- coding: utf-8 -*-
import os
import unittest
from StringIO import StringIO
import antlr3

class TestStringStream(unittest.TestCase):
    """Test case for the StringStream class."""
    def testSize(self):
        """StringStream.size()"""
        stream = antlr3.StringStream('foo')
        self.failUnlessEqual(stream.size(), 3)
    def testIndex(self):
        """StringStream.index()"""
        stream = antlr3.StringStream('foo')
        self.failUnlessEqual(stream.index(), 0)
    def testConsume(self):
        """StringStream.consume()"""
        stream = antlr3.StringStream('foo\nbar')
        stream.consume()  # f
        self.failUnlessEqual(stream.index(), 1)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.line, 1)
        stream.consume()  # o
        self.failUnlessEqual(stream.index(), 2)
        self.failUnlessEqual(stream.charPositionInLine, 2)
        self.failUnlessEqual(stream.line, 1)
        stream.consume()  # o
        self.failUnlessEqual(stream.index(), 3)
        self.failUnlessEqual(stream.charPositionInLine, 3)
        self.failUnlessEqual(stream.line, 1)
        stream.consume()  # \n
        self.failUnlessEqual(stream.index(), 4)
        self.failUnlessEqual(stream.charPositionInLine, 0)
        self.failUnlessEqual(stream.line, 2)
        stream.consume()  # b
        self.failUnlessEqual(stream.index(), 5)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.line, 2)
        stream.consume()  # a
        self.failUnlessEqual(stream.index(), 6)
        self.failUnlessEqual(stream.charPositionInLine, 2)
        self.failUnlessEqual(stream.line, 2)
        stream.consume()  # r
        self.failUnlessEqual(stream.index(), 7)
        self.failUnlessEqual(stream.charPositionInLine, 3)
        self.failUnlessEqual(stream.line, 2)
        stream.consume()  # EOF
        self.failUnlessEqual(stream.index(), 7)
        self.failUnlessEqual(stream.charPositionInLine, 3)
        self.failUnlessEqual(stream.line, 2)
        stream.consume()  # EOF
        self.failUnlessEqual(stream.index(), 7)
        self.failUnlessEqual(stream.charPositionInLine, 3)
        self.failUnlessEqual(stream.line, 2)
    def testReset(self):
        """StringStream.reset()"""
        stream = antlr3.StringStream('foo')
        stream.consume()
        stream.consume()
        stream.reset()
        self.failUnlessEqual(stream.index(), 0)
        self.failUnlessEqual(stream.line, 1)
        self.failUnlessEqual(stream.charPositionInLine, 0)
        self.failUnlessEqual(stream.LT(1), 'f')
    def testLA(self):
        """StringStream.LA()"""
        stream = antlr3.StringStream('foo')
        self.failUnlessEqual(stream.LT(1), 'f')
        self.failUnlessEqual(stream.LT(2), 'o')
        self.failUnlessEqual(stream.LT(3), 'o')
        stream.consume()
        stream.consume()
        self.failUnlessEqual(stream.LT(1), 'o')
        self.failUnlessEqual(stream.LT(2), antlr3.EOF)
        self.failUnlessEqual(stream.LT(3), antlr3.EOF)
    def testSubstring(self):
        """StringStream.substring()"""
        stream = antlr3.StringStream('foobar')
        self.failUnlessEqual(stream.substring(0, 0), 'f')
        self.failUnlessEqual(stream.substring(0, 1), 'fo')
        self.failUnlessEqual(stream.substring(0, 5), 'foobar')
        self.failUnlessEqual(stream.substring(3, 5), 'bar')
    def testSeekForward(self):
        """StringStream.seek(): forward"""
        stream = antlr3.StringStream('foo\nbar')
        stream.seek(4)
        self.failUnlessEqual(stream.index(), 4)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 0)
        self.failUnlessEqual(stream.LT(1), 'b')
##    # not yet implemented
##    def testSeekBackward(self):
##        """StringStream.seek(): backward"""
##        stream = antlr3.StringStream('foo\nbar')
##        stream.seek(4)
##        stream.seek(1)
##        self.failUnlessEqual(stream.index(), 1)
##        self.failUnlessEqual(stream.line, 1)
##        self.failUnlessEqual(stream.charPositionInLine, 1)
##        self.failUnlessEqual(stream.LA(1), 'o')
    def testMark(self):
        """StringStream.mark()"""
        stream = antlr3.StringStream('foo\nbar')
        stream.seek(4)
        marker = stream.mark()
        self.failUnlessEqual(marker, 1)
        self.failUnlessEqual(stream.markDepth, 1)
        stream.consume()
        marker = stream.mark()
        self.failUnlessEqual(marker, 2)
        self.failUnlessEqual(stream.markDepth, 2)
    def testReleaseLast(self):
        """StringStream.release(): last marker"""
        stream = antlr3.StringStream('foo\nbar')
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.release()
        self.failUnlessEqual(stream.markDepth, 1)
        # release same marker again, nothing has changed
        stream.release()
        self.failUnlessEqual(stream.markDepth, 1)
    def testReleaseNested(self):
        """StringStream.release(): nested"""
        stream = antlr3.StringStream('foo\nbar')
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.consume()
        marker3 = stream.mark()
        stream.release(marker2)
        self.failUnlessEqual(stream.markDepth, 1)
    def testRewindLast(self):
        """StringStream.rewind(): last marker"""
        stream = antlr3.StringStream('foo\nbar')
        stream.seek(4)
        marker = stream.mark()
        stream.consume()
        stream.consume()
        stream.rewind()
        self.failUnlessEqual(stream.markDepth, 0)
        self.failUnlessEqual(stream.index(), 4)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 0)
        self.failUnlessEqual(stream.LT(1), 'b')
    def testRewindNested(self):
        """StringStream.rewind(): nested"""
        stream = antlr3.StringStream('foo\nbar')
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.consume()
        marker3 = stream.mark()
        stream.rewind(marker2)
        self.failUnlessEqual(stream.markDepth, 1)
        self.failUnlessEqual(stream.index(), 5)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.LT(1), 'a')

class TestFileStream(unittest.TestCase):
    """Test case for the FileStream class."""
    def testNoEncoding(self):
        path = os.path.join(os.path.dirname(__file__), 'teststreams.input1')
        stream = antlr3.FileStream(path)
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.consume()
        marker3 = stream.mark()
        stream.rewind(marker2)
        self.failUnlessEqual(stream.markDepth, 1)
        self.failUnlessEqual(stream.index(), 5)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.LT(1), 'a')
        self.failUnlessEqual(stream.LA(1), ord('a'))
    def testEncoded(self):
        path = os.path.join(os.path.dirname(__file__), 'teststreams.input2')
        stream = antlr3.FileStream(path, 'utf-8')
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.consume()
        marker3 = stream.mark()
        stream.rewind(marker2)
        self.failUnlessEqual(stream.markDepth, 1)
        self.failUnlessEqual(stream.index(), 5)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.LT(1), u'ä')
        self.failUnlessEqual(stream.LA(1), ord(u'ä'))

class TestInputStream(unittest.TestCase):
    """Test case for the InputStream class."""
    def testNoEncoding(self):
        file = StringIO('foo\nbar')
        stream = antlr3.InputStream(file)
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.consume()
        marker3 = stream.mark()
        stream.rewind(marker2)
        self.failUnlessEqual(stream.markDepth, 1)
        self.failUnlessEqual(stream.index(), 5)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.LT(1), 'a')
        self.failUnlessEqual(stream.LA(1), ord('a'))
    def testEncoded(self):
        file = StringIO(u'foo\nbär'.encode('utf-8'))
        stream = antlr3.InputStream(file, 'utf-8')
        stream.seek(4)
        marker1 = stream.mark()
        stream.consume()
        marker2 = stream.mark()
        stream.consume()
        marker3 = stream.mark()
        stream.rewind(marker2)
        self.failUnlessEqual(stream.markDepth, 1)
        self.failUnlessEqual(stream.index(), 5)
        self.failUnlessEqual(stream.line, 2)
        self.failUnlessEqual(stream.charPositionInLine, 1)
        self.failUnlessEqual(stream.LT(1), u'ä')
        self.failUnlessEqual(stream.LA(1), ord(u'ä'))

class TestCommonTokenStream(unittest.TestCase):
    """Test case for the CommonTokenStream class."""
    def setUp(self):
        """Setup test fixture
        The constructor of CommonTokenStream needs a token source. This
        is a simple mock class providing just the nextToken() method.
        """
        class MockSource(object):
            def __init__(self):
                self.tokens = []
            def makeEOFToken(self):
                return antlr3.CommonToken(type=antlr3.EOF)
            def nextToken(self):
                try:
                    return self.tokens.pop(0)
                except IndexError:
                    return None
        self.source = MockSource()
    def testInit(self):
        """CommonTokenStream.__init__()"""
        stream = antlr3.CommonTokenStream(self.source)
        self.failUnlessEqual(stream.index(), -1)
    def testSetTokenSource(self):
        """CommonTokenStream.setTokenSource()"""
        stream = antlr3.CommonTokenStream(None)
        stream.setTokenSource(self.source)
        self.failUnlessEqual(stream.index(), -1)
        self.failUnlessEqual(stream.channel, antlr3.DEFAULT_CHANNEL)
    def testLTEmptySource(self):
        """CommonTokenStream.LT(): EOF (empty source)"""
        stream = antlr3.CommonTokenStream(self.source)
        lt1 = stream.LT(1)
        self.failUnlessEqual(lt1.type, antlr3.EOF)
    def testLT1(self):
        """CommonTokenStream.LT(1)"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        stream = antlr3.CommonTokenStream(self.source)
        lt1 = stream.LT(1)
        self.failUnlessEqual(lt1.type, 12)
    def testLT1WithHidden(self):
        """CommonTokenStream.LT(1): with hidden tokens"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        stream = antlr3.CommonTokenStream(self.source)
        lt1 = stream.LT(1)
        self.failUnlessEqual(lt1.type, 13)
    def testLT2BeyondEnd(self):
        """CommonTokenStream.LT(2): beyond end"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13, channel=antlr3.HIDDEN_CHANNEL)
        )
        stream = antlr3.CommonTokenStream(self.source)
        lt1 = stream.LT(2)
        self.failUnlessEqual(lt1.type, antlr3.EOF)
    # not yet implemented
    def testLTNegative(self):
        """CommonTokenStream.LT(-1): look back"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        stream = antlr3.CommonTokenStream(self.source)
        stream.fillBuffer()
        stream.consume()
        lt1 = stream.LT(-1)
        self.failUnlessEqual(lt1.type, 12)
    def testLB1(self):
        """CommonTokenStream.LB(1)"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        stream = antlr3.CommonTokenStream(self.source)
        stream.fillBuffer()
        stream.consume()
        self.failUnlessEqual(stream.LB(1).type, 12)
    def testLTZero(self):
        """CommonTokenStream.LT(0)"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        stream = antlr3.CommonTokenStream(self.source)
        lt1 = stream.LT(0)
        self.failUnless(lt1 is None)
    def testLBBeyondBegin(self):
        """CommonTokenStream.LB(-1): beyond begin"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        stream = antlr3.CommonTokenStream(self.source)
        self.failUnless(stream.LB(1) is None)
        stream.consume()
        stream.consume()
        self.failUnless(stream.LB(3) is None)
    def testFillBuffer(self):
        """CommonTokenStream.fillBuffer()"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=14)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
        )
        stream = antlr3.CommonTokenStream(self.source)
        stream.fillBuffer()
        self.failUnlessEqual(len(stream.tokens), 3)
        self.failUnlessEqual(stream.tokens[0].type, 12)
        self.failUnlessEqual(stream.tokens[1].type, 13)
        self.failUnlessEqual(stream.tokens[2].type, 14)
    def testConsume(self):
        """CommonTokenStream.consume()"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
        )
        stream = antlr3.CommonTokenStream(self.source)
        self.failUnlessEqual(stream.LA(1), 12)
        stream.consume()
        self.failUnlessEqual(stream.LA(1), 13)
        stream.consume()
        self.failUnlessEqual(stream.LA(1), antlr3.EOF)
        stream.consume()
        self.failUnlessEqual(stream.LA(1), antlr3.EOF)
    def testSeek(self):
        """CommonTokenStream.seek()"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
        )
        stream = antlr3.CommonTokenStream(self.source)
        self.failUnlessEqual(stream.LA(1), 12)
        stream.seek(2)
        self.failUnlessEqual(stream.LA(1), antlr3.EOF)
        stream.seek(0)
        self.failUnlessEqual(stream.LA(1), 12)
    def testMarkRewind(self):
        """CommonTokenStream.mark()/rewind()"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
        )
        stream = antlr3.CommonTokenStream(self.source)
        stream.fillBuffer()
        stream.consume()
        marker = stream.mark()
        stream.consume()
        stream.rewind(marker)
        self.failUnlessEqual(stream.LA(1), 13)
    def testToString(self):
        """CommonTokenStream.toString()"""
        self.source.tokens.append(
            antlr3.CommonToken(type=12, text="foo")
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=13, text="bar")
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=14, text="gnurz")
        )
        self.source.tokens.append(
            antlr3.CommonToken(type=15, text="blarz")
        )
        stream = antlr3.CommonTokenStream(self.source)
        assert stream.toString() == "foobargnurzblarz"
        assert stream.toString(1, 2) == "bargnurz"
        assert stream.toString(stream.tokens[1], stream.tokens[-2]) == "bargnurz"

if __name__ == "__main__":
...
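As the assertions above suggest, antlr3.StringStream tracks index, line and charPositionInLine as characters are consumed, and seek() keeps those counters consistent. A small sketch of that behaviour, assuming the Python 2 antlr3 runtime exercised by this test suite is installed:

import antlr3

stream = antlr3.StringStream('foo\nbar')
stream.consume()   # 'f'
stream.consume()   # 'o'
print(stream.index(), stream.line, stream.charPositionInLine)   # 2 1 2
stream.seek(4)     # jump just past the newline
print(stream.line, stream.LT(1))                                # 2 'b'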
qa_stream_demux.py
Source:qa_stream_demux.py
...
        return (dst0.data(), dst1.data())
    def help_stream_tag_propagation(self, N, stream_sizes):
        src_data = (stream_sizes[0]*[1,] + stream_sizes[1]*[2,] + stream_sizes[2]*[3,]) * N
        src = blocks.vector_source_f(src_data, False)
        tag_stream1 = blocks.stream_to_tagged_stream(gr.sizeof_float, 1,
                                                     stream_sizes[0], 'src1')
        tag_stream2 = blocks.stream_to_tagged_stream(gr.sizeof_float, 1,
                                                     stream_sizes[1], 'src2')
        tag_stream3 = blocks.stream_to_tagged_stream(gr.sizeof_float, 1,
                                                     stream_sizes[2], 'src3')
        demux = stream_demux.stream_demux(gr.sizeof_float, stream_sizes)
        dst0 = blocks.vector_sink_f()
        dst1 = blocks.vector_sink_f()
        dst2 = blocks.vector_sink_f()
        self.tb.connect(src, tag_stream1)
        self.tb.connect(tag_stream1, tag_stream2)
        self.tb.connect(tag_stream2, tag_stream3)
        self.tb.connect(tag_stream3, demux)
        self.tb.connect((demux, 0), dst0)
        self.tb.connect((demux, 1), dst1)
        self.tb.connect((demux, 2), dst2)
        self.tb.run()
        return (dst0, dst1, dst2)
...
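The helper builds a flowgraph in which the demux routes consecutive chunks of stream_sizes items to successive output ports. A stripped-down sketch of the same wiring outside the test class, assuming GNU Radio and the stream_demux module shown above are importable (the plain `import stream_demux` and the data values are illustrative assumptions, not taken from this file):

from gnuradio import gr, blocks
import stream_demux  # module under test, assumed importable

tb = gr.top_block()
stream_sizes = [1, 2, 3]
src = blocks.vector_source_f([1.0] * sum(stream_sizes) * 4, False)
demux = stream_demux.stream_demux(gr.sizeof_float, stream_sizes)
sinks = [blocks.vector_sink_f() for _ in stream_sizes]
tb.connect(src, demux)
for port, sink in enumerate(sinks):
    tb.connect((demux, port), sink)
tb.run()
print([len(s.data()) for s in sinks])  # items routed to each output port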
BUILD
Source:BUILD
# Description:
#   ROCm-platform specific StreamExecutor support code.
load(
    "//tensorflow/stream_executor:build_defs.bzl",
    "stream_executor_friends",
)
load("//tensorflow:tensorflow.bzl", "tf_copts")
load("@local_config_rocm//rocm:build_defs.bzl", "if_rocm_is_configured")
load("//tensorflow/core/platform:build_config_root.bzl", "if_static")

package(
    default_visibility = [":friends"],
    licenses = ["notice"],  # Apache 2.0
)

package_group(
    name = "friends",
    packages = stream_executor_friends(),
)

# Filegroup used to collect source files for the dependency check.
filegroup(
    name = "c_srcs",
    data = glob([
        "**/*.cc",
        "**/*.h",
    ]),
)

cc_library(
    name = "rocm_diagnostics",
    srcs = if_rocm_is_configured(["rocm_diagnostics.cc"]),
    hdrs = if_rocm_is_configured(["rocm_diagnostics.h"]),
    deps = if_rocm_is_configured([
        "@com_google_absl//absl/container:inlined_vector",
        "@com_google_absl//absl/strings",
        "@com_google_absl//absl/strings:str_format",
        "//tensorflow/stream_executor/gpu:gpu_diagnostics_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
    ]),
)

cc_library(
    name = "rocm_driver",
    srcs = if_rocm_is_configured(["rocm_driver.cc"]),
    hdrs = if_rocm_is_configured(["rocm_driver_wrapper.h"]),
    deps = if_rocm_is_configured([
        ":rocm_diagnostics",
        "@com_google_absl//absl/base",
        "@com_google_absl//absl/container:inlined_vector",
        "@com_google_absl//absl/strings",
        "//tensorflow/stream_executor:device_options",
        "//tensorflow/stream_executor/gpu:gpu_driver_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
        "//tensorflow/stream_executor/platform:dso_loader",
        "@local_config_rocm//rocm:rocm_headers",
    ]),
)

cc_library(
    name = "rocm_activation",
    srcs = [],
    hdrs = if_rocm_is_configured(["rocm_activation.h"]),
    deps = if_rocm_is_configured([
        ":rocm_driver",
        "@local_config_rocm//rocm:rocm_headers",
        "//tensorflow/stream_executor",
        "//tensorflow/stream_executor:stream_executor_internal",
        "//tensorflow/stream_executor/gpu:gpu_activation",
        "//tensorflow/stream_executor/platform",
    ]),
)

cc_library(
    name = "rocm_event",
    srcs = if_rocm_is_configured(["rocm_event.cc"]),
    hdrs = [],
    deps = if_rocm_is_configured([
        ":rocm_driver",
        "//tensorflow/stream_executor:stream_executor_headers",
        "//tensorflow/stream_executor/gpu:gpu_event_header",
        "//tensorflow/stream_executor/gpu:gpu_executor_header",
        "//tensorflow/stream_executor/gpu:gpu_stream_header",
        "//tensorflow/stream_executor/lib",
    ]),
)

cc_library(
    name = "rocm_gpu_executor",
    srcs = if_rocm_is_configured(["rocm_gpu_executor.cc"]),
    hdrs = [],
    deps = if_rocm_is_configured([
        ":rocm_diagnostics",
        ":rocm_driver",
        ":rocm_event",
        ":rocm_kernel",
        ":rocm_platform_id",
        "@com_google_absl//absl/strings",
        "//tensorflow/stream_executor:event",
        "//tensorflow/stream_executor:plugin_registry",
        "//tensorflow/stream_executor:stream_executor_internal",
        "//tensorflow/stream_executor:stream_executor_pimpl_header",
        "//tensorflow/stream_executor:timer",
        "//tensorflow/stream_executor/gpu:gpu_activation_header",
        "//tensorflow/stream_executor/gpu:gpu_event",
        "//tensorflow/stream_executor/gpu:gpu_kernel_header",
        "//tensorflow/stream_executor/gpu:gpu_stream",
        "//tensorflow/stream_executor/gpu:gpu_timer",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
        "//tensorflow/stream_executor/platform:dso_loader",
    ]),
    alwayslink = True,
)

cc_library(
    name = "rocm_kernel",
    srcs = if_rocm_is_configured(["rocm_kernel.cc"]),
    hdrs = [],
    visibility = ["//visibility:public"],
    deps = if_rocm_is_configured([
        "//tensorflow/stream_executor/gpu:gpu_kernel_header",
    ]),
    alwayslink = True,
)

cc_library(
    name = "rocm_platform",
    srcs = if_rocm_is_configured(["rocm_platform.cc"]),
    hdrs = if_rocm_is_configured(["rocm_platform.h"]),
    visibility = ["//visibility:public"],
    deps = if_rocm_is_configured([
        ":rocm_driver",
        ":rocm_gpu_executor",
        ":rocm_platform_id",
        "@com_google_absl//absl/base",
        "@com_google_absl//absl/memory",
        "//tensorflow/core:lib",
        "//tensorflow/stream_executor",  # buildcleaner: keep
        "//tensorflow/stream_executor:executor_cache",
        "//tensorflow/stream_executor:multi_platform_manager",
        "//tensorflow/stream_executor:stream_executor_pimpl_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
    ]),
    alwayslink = True,  # Registers itself with the MultiPlatformManager.
)

cc_library(
    name = "rocm_platform_id",
    srcs = ["rocm_platform_id.cc"],
    hdrs = ["rocm_platform_id.h"],
    deps = ["//tensorflow/stream_executor:platform"],
)

cc_library(
    name = "rocblas_plugin",
    srcs = if_rocm_is_configured(["rocm_blas.cc"]),
    hdrs = if_rocm_is_configured(["rocm_blas.h"]),
    visibility = ["//visibility:public"],
    deps = if_rocm_is_configured([
        ":rocm_gpu_executor",
        ":rocm_platform_id",
        "//third_party/eigen3",
        "//tensorflow/core:lib",
        "//tensorflow/core:lib_internal",
        "//tensorflow/stream_executor",
        "//tensorflow/stream_executor:event",
        "//tensorflow/stream_executor:host_or_device_scalar",
        "//tensorflow/stream_executor:plugin_registry",
        "//tensorflow/stream_executor:scratch_allocator",
        "//tensorflow/stream_executor:timer",
        "//tensorflow/stream_executor/gpu:gpu_activation",
        "//tensorflow/stream_executor/gpu:gpu_helpers_header",
        "//tensorflow/stream_executor/gpu:gpu_stream_header",
        "//tensorflow/stream_executor/gpu:gpu_timer_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
        "//tensorflow/stream_executor/platform:dso_loader",
        "@com_google_absl//absl/strings",
        "@local_config_rocm//rocm:rocm_headers",
    ] + if_static([
        "@local_config_rocm//rocm:rocblas",
    ])),
    alwayslink = True,
)

cc_library(
    name = "rocfft_plugin",
    srcs = if_rocm_is_configured(["rocm_fft.cc"]),
    hdrs = if_rocm_is_configured(["rocm_fft.h"]),
    visibility = ["//visibility:public"],
    deps = if_rocm_is_configured([
        ":rocm_platform_id",
        "//tensorflow/stream_executor:event",
        "//tensorflow/stream_executor:fft",
        "//tensorflow/stream_executor:plugin_registry",
        "//tensorflow/stream_executor:scratch_allocator",
        "//tensorflow/stream_executor/gpu:gpu_activation",
        "//tensorflow/stream_executor/gpu:gpu_helpers_header",
        "//tensorflow/stream_executor/gpu:gpu_executor_header",
        "//tensorflow/stream_executor/gpu:gpu_stream_header",
        "//tensorflow/stream_executor/gpu:gpu_kernel_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
        "//tensorflow/stream_executor/platform:dso_loader",
        "@local_config_rocm//rocm:rocm_headers",
    ] + if_static([
        "@local_config_rocm//rocm:rocfft",
    ])),
    alwayslink = True,
)

cc_library(
    name = "miopen_plugin",
    srcs = if_rocm_is_configured(["rocm_dnn.cc"]),
    hdrs = if_rocm_is_configured(["rocm_dnn.h"]),
    copts = [
        # STREAM_EXECUTOR_CUDNN_WRAP would fail on Clang with the default
        # setting of template depth 256
        "-ftemplate-depth-512",
    ],
    visibility = ["//visibility:public"],
    deps = if_rocm_is_configured([
        ":rocm_diagnostics",
        ":rocm_driver",
        ":rocm_gpu_executor",
        ":rocm_platform_id",
        "//third_party/eigen3",
        "//tensorflow/core:lib",
        "//tensorflow/core:lib_internal",
        "//tensorflow/stream_executor:dnn",
        "//tensorflow/stream_executor:event",
        "//tensorflow/stream_executor:plugin_registry",
        "//tensorflow/stream_executor:scratch_allocator",
        "//tensorflow/stream_executor:stream_executor_pimpl_header",
        "//tensorflow/stream_executor:temporary_device_memory",
        "//tensorflow/stream_executor/gpu:gpu_activation_header",
        "//tensorflow/stream_executor/gpu:gpu_stream_header",
        "//tensorflow/stream_executor/gpu:gpu_timer_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
        "//tensorflow/stream_executor/platform:dso_loader",
        "@com_google_absl//absl/strings",
        "@local_config_rocm//rocm:rocm_headers",
    ] + if_static([
        "@local_config_rocm//rocm:miopen",
    ])),
    alwayslink = True,
)

cc_library(
    name = "rocrand_plugin",
    srcs = if_rocm_is_configured(["rocm_rng.cc"]),
    hdrs = if_rocm_is_configured([]),
    deps = if_rocm_is_configured([
        ":rocm_gpu_executor",
        ":rocm_platform_id",
        "@local_config_rocm//rocm:rocm_headers",
        "//tensorflow/stream_executor:event",
        "//tensorflow/stream_executor:plugin_registry",
        "//tensorflow/stream_executor:rng",
        "//tensorflow/stream_executor/gpu:gpu_activation_header",
        "//tensorflow/stream_executor/gpu:gpu_helpers_header",
        "//tensorflow/stream_executor/gpu:gpu_executor_header",
        "//tensorflow/stream_executor/gpu:gpu_rng_header",
        "//tensorflow/stream_executor/gpu:gpu_stream_header",
        "//tensorflow/stream_executor/lib",
        "//tensorflow/stream_executor/platform",
        "//tensorflow/stream_executor/platform:dso_loader",
    ] + if_static([
        "@local_config_rocm//rocm:hiprand",
    ])),
    alwayslink = True,
)

cc_library(
    name = "all_runtime",
    copts = tf_copts(),
    visibility = ["//visibility:public"],
    deps = if_rocm_is_configured([
        ":miopen_plugin",
        ":rocfft_plugin",
        ":rocblas_plugin",
        ":rocrand_plugin",
        ":rocm_driver",
        ":rocm_platform",
    ]),
    alwayslink = 1,
...
transformer_test.py
Source:transformer_test.py
...46 """Tests transform() calls functions in the correct arrangement."""47 my_transformer = TransformerImpl()48 my_transformer.transform(mock.Mock())49 self.assertEqual(['begin', 'transform', 'end'], my_transformer.actions)50 def test_transform_initializes_input_stream(self):51 """Tests transform() initializes the input_stream before beginning."""52 input_stream = mock.Mock()53 transformer = TransformerImpl()54 # Purposely fail before sending any data55 transformer.on_begin = raise_exception(Exception)56 with self.assertRaises(Exception):57 transformer.transform(input_stream)58 # Asserts initialize was called before on_begin.59 self.assertTrue(input_stream.initialize.called)60 def test_transform_initializes_output_stream(self):61 """Tests transform() initializes the output_stream before beginning."""62 output_stream = mock.Mock()63 transformer = TransformerImpl()64 transformer.set_output_stream(output_stream)65 # Purposely fail before sending any data66 transformer.on_begin = raise_exception(Exception)67 with self.assertRaises(Exception):68 transformer.transform(mock.Mock())69 # Asserts initialize was called before on_begin.70 self.assertTrue(output_stream.initialize.called)71class SourceTransformerTest(unittest.TestCase):72 """Tests the SourceTransformer class."""73 def test_transform_ends_on_buffer_stream_end(self):74 """Tests transformation ends on stream end."""75 source_transformer = SourceTransformer()76 source_transformer.set_output_stream(mock.Mock())77 transform_buffer = mock.Mock(side_effect=[BufferStream.END])78 source_transformer._transform_buffer = transform_buffer79 output_stream = mock.Mock()80 source_transformer.transform(output_stream)81 self.assertFalse(output_stream.add_indexed_buffer.called)82 def test_transform_adds_transformed_index_buffer(self):83 source_transformer = SourceTransformer()84 output_stream = mock.Mock()85 source_transformer.set_output_stream(output_stream)86 expected_buffer = [0, 1, 2]87 transform_buffer = mock.Mock(88 side_effect=[expected_buffer, BufferStream.END])89 source_transformer._transform_buffer = transform_buffer90 source_transformer.transform(mock.Mock())91 self.assertEqual(92 expected_buffer,93 output_stream.add_indexed_buffer.call_args[ARGS][0].buffer)94 def test_transform_increases_buffer_index_each_call(self):95 source_transformer = SourceTransformer()96 output_stream = mock.Mock()97 source_transformer.set_output_stream(output_stream)98 buffer = [0, 1, 2]99 transform_buffer = mock.Mock(100 side_effect=[buffer, buffer, buffer, BufferStream.END])101 source_transformer._transform_buffer = transform_buffer102 source_transformer.transform(mock.Mock())103 self.assertEqual([0, 1, 2], [104 output_stream.add_indexed_buffer.call_args_list[i][ARGS][0].index105 for i in range(output_stream.add_indexed_buffer.call_count)106 ])107 def test_transform_calls_end_stream(self):108 source_transformer = SourceTransformer()109 output_stream = mock.Mock()110 source_transformer.set_output_stream(output_stream)111 transform_buffer = mock.Mock(side_effect=[BufferStream.END])112 source_transformer._transform_buffer = transform_buffer113 source_transformer.transform(mock.Mock())114 self.assertTrue(output_stream.end_stream.called)115class SequentialTransformerTest(unittest.TestCase):116 """Unit tests the SequentialTransformer class."""117 def test_send_buffers_updates_next_index_on_buffer_list(self):118 sequential_transformer = SequentialTransformer()119 sequential_transformer._next_index = 10120 expected_next_index = 15121 
sequential_transformer._send_buffers(BufferList([[]] * 5))122 self.assertEqual(expected_next_index,123 sequential_transformer._next_index)124 def test_send_buffers_updates_next_index_on_single_buffer(self):125 sequential_transformer = SequentialTransformer()126 sequential_transformer._next_index = 10127 expected_next_index = 11128 sequential_transformer._send_buffers([])129 self.assertEqual(expected_next_index,130 sequential_transformer._next_index)131 def test_send_buffers_sends_buffer_list_with_correct_indexes(self):132 buffers_to_send = [133 [1],134 [1, 2],135 [1, 2, 3],136 [1, 2, 3, 4],137 [1, 2, 3, 4, 5],138 ]139 sequential_transformer = SequentialTransformer()140 output_stream = mock.Mock()141 sequential_transformer.set_output_stream(output_stream)142 sequential_transformer._send_buffers(BufferList(buffers_to_send))143 for expected_index, expected_buffer in enumerate(buffers_to_send):144 call = output_stream.add_indexed_buffer.call_args_list[145 expected_index]146 self.assertEqual(expected_index, call[ARGS][0].index)147 self.assertEqual(expected_buffer, call[ARGS][0].buffer)148 def test_transform_breaks_upon_buffer_stream_end_received(self):149 sequential_transformer = SequentialTransformer()150 output_stream = mock.Mock()151 input_stream = mock.Mock()152 sequential_transformer.set_output_stream(output_stream)153 input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]154 sequential_transformer._transform(input_stream)155 self.assertFalse(output_stream.add_indexed_buffer.called)156 def test_transform_closes_output_stream_when_finished(self):157 sequential_transformer = SequentialTransformer()158 output_stream = mock.Mock()159 input_stream = mock.Mock()160 sequential_transformer.set_output_stream(output_stream)161 input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]162 sequential_transformer._transform(input_stream)163 self.assertTrue(output_stream.end_stream.called)164class ParallelTransformerTest(unittest.TestCase):165 """Unit tests the ParallelTransformer class."""166 def test_transform_breaks_upon_buffer_stream_end_received(self):167 parallel_transformer = ParallelTransformer()168 output_stream = mock.Mock()169 input_stream = mock.Mock()170 parallel_transformer.set_output_stream(output_stream)171 input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]172 parallel_transformer._transform(input_stream)173 self.assertFalse(output_stream.add_indexed_buffer.called)174 def test_transform_closes_output_stream_when_finished(self):175 parallel_transformer = ParallelTransformer()176 output_stream = mock.Mock()177 input_stream = mock.Mock()178 parallel_transformer.set_output_stream(output_stream)179 input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]180 parallel_transformer._transform(input_stream)181 self.assertTrue(output_stream.end_stream.called)182 def test_transform_passes_indexed_buffer_with_updated_buffer(self):183 expected_buffer = [0, 1, 2, 3, 4]184 expected_index = 12345185 parallel_transformer = ParallelTransformer()186 output_stream = mock.Mock()187 input_stream = mock.Mock()188 parallel_transformer.set_output_stream(output_stream)189 input_stream.remove_indexed_buffer.side_effect = [190 IndexedBuffer(expected_index, []), BufferStream.END191 ]192 parallel_transformer._transform_buffer = lambda _: expected_buffer193 parallel_transformer._transform(input_stream)194 self.assertEqual(195 expected_buffer,196 output_stream.add_indexed_buffer.call_args_list[0][ARGS][0].buffer)197 self.assertEqual(198 expected_index,199 
output_stream.add_indexed_buffer.call_args_list[0][ARGS][0].index)200if __name__ == '__main__':...
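These tests lean on one unittest.mock idiom throughout: a Mock configured with a side_effect list returns the queued values one per call, so a sentinel such as BufferStream.END terminates the transformer's read loop. A stripped-down sketch of that pattern using only the standard library (END and read_all are illustrative stand-ins, not part of the transformer module under test):

from unittest import mock

END = object()  # stand-in sentinel, analogous to BufferStream.END

def read_all(stream):
    """Drain a mock stream until the sentinel is returned."""
    items = []
    while True:
        item = stream.remove_indexed_buffer()
        if item is END:
            return items
        items.append(item)

stream = mock.Mock()
stream.remove_indexed_buffer.side_effect = ['a', 'b', END]
print(read_all(stream))                        # ['a', 'b']
print(stream.remove_indexed_buffer.call_count) # 3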
yaml_helper.py
Source:yaml_helper.py
...
    def _loader(self, stream):
        """Vault related tests will want to override this.
        Vault cases should setup a AnsibleLoader that has the vault password."""
        return AnsibleLoader(stream)
    def _dump_stream(self, obj, stream, dumper=None):
        """Dump to a py2-unicode or py3-string stream."""
        if PY3:
            return yaml.dump(obj, stream, Dumper=dumper)
        else:
            return yaml.dump(obj, stream, Dumper=dumper, encoding=None)
    def _dump_string(self, obj, dumper=None):
        """Dump to a py2-unicode or py3-string"""
        if PY3:
            return yaml.dump(obj, Dumper=dumper)
        else:
            return yaml.dump(obj, Dumper=dumper, encoding=None)
    def _dump_load_cycle(self, obj):
        # Each pass through a dump or load revs the 'generation'
        # obj to yaml string
        string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper)
        # wrap a stream/file like StringIO around that yaml
        stream_from_object_dump = io.StringIO(string_from_object_dump)
        loader = self._loader(stream_from_object_dump)
        # load the yaml stream to create a new instance of the object (gen 2)
        obj_2 = loader.get_data()
        # dump the gen 2 objects directly to strings
        string_from_object_dump_2 = self._dump_string(obj_2,
                                                      dumper=AnsibleDumper)
        # The gen 1 and gen 2 yaml strings
        self.assertEquals(string_from_object_dump, string_from_object_dump_2)
        # the gen 1 (orig) and gen 2 py object
        self.assertEquals(obj, obj_2)
        # again! gen 3... load strings into py objects
        stream_3 = io.StringIO(string_from_object_dump_2)
        loader_3 = self._loader(stream_3)
        obj_3 = loader_3.get_data()
        string_from_object_dump_3 = self._dump_string(obj_3, dumper=AnsibleDumper)
        self.assertEquals(obj, obj_3)
        # should be transitive, but...
        self.assertEquals(obj_2, obj_3)
        self.assertEquals(string_from_object_dump, string_from_object_dump_3)
    def _old_dump_load_cycle(self, obj):
        '''Dump the passed in object to yaml, load it back up, dump again, compare.'''
        stream = io.StringIO()
        yaml_string = self._dump_string(obj, dumper=AnsibleDumper)
        self._dump_stream(obj, stream, dumper=AnsibleDumper)
        yaml_string_from_stream = stream.getvalue()
        # reset stream
        stream.seek(0)
        loader = self._loader(stream)
        # loader = AnsibleLoader(stream, vault_password=self.vault_password)
        obj_from_stream = loader.get_data()
        stream_from_string = io.StringIO(yaml_string)
        loader2 = self._loader(stream_from_string)
        # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password)
        obj_from_string = loader2.get_data()
        stream_obj_from_stream = io.StringIO()
        stream_obj_from_string = io.StringIO()
        if PY3:
            yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper)
...
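The _dump_load_cycle helper is essentially a round-trip check: dump an object to YAML, load it back, dump the reloaded object, and compare the two generations. A minimal sketch of the same idea with plain PyYAML, without the Ansible loader and dumper classes used above:

import io
import yaml

obj = {'name': 'test-stream', 'tags': {'env': 'development'}}
gen1 = yaml.dump(obj)                      # gen 1: object -> YAML string
obj_2 = yaml.safe_load(io.StringIO(gen1))  # gen 2: YAML stream -> object
gen2 = yaml.dump(obj_2)                    # gen 2 object -> YAML string
assert obj == obj_2
assert gen1 == gen2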
test_stream.py
Source:test_stream.py
from __future__ import absolute_import, division, unicode_literals
from . import support  # flake8: noqa
import unittest
import codecs
from html5lib.inputstream import HTMLInputStream, HTMLUnicodeInputStream, HTMLBinaryInputStream

class HTMLUnicodeInputStreamShortChunk(HTMLUnicodeInputStream):
    _defaultChunkSize = 2

class HTMLBinaryInputStreamShortChunk(HTMLBinaryInputStream):
    _defaultChunkSize = 2

class HTMLInputStreamTest(unittest.TestCase):
    def test_char_ascii(self):
        stream = HTMLInputStream(b"'", encoding='ascii')
        self.assertEqual(stream.charEncoding[0], 'ascii')
        self.assertEqual(stream.char(), "'")

    def test_char_utf8(self):
        stream = HTMLInputStream('\u2018'.encode('utf-8'), encoding='utf-8')
        self.assertEqual(stream.charEncoding[0], 'utf-8')
        self.assertEqual(stream.char(), '\u2018')

    def test_char_win1252(self):
        stream = HTMLInputStream("\xa9\xf1\u2019".encode('windows-1252'))
        self.assertEqual(stream.charEncoding[0], 'windows-1252')
        self.assertEqual(stream.char(), "\xa9")
        self.assertEqual(stream.char(), "\xf1")
        self.assertEqual(stream.char(), "\u2019")

    def test_bom(self):
        stream = HTMLInputStream(codecs.BOM_UTF8 + b"'")
        self.assertEqual(stream.charEncoding[0], 'utf-8')
        self.assertEqual(stream.char(), "'")

    def test_utf_16(self):
        stream = HTMLInputStream((' ' * 1025).encode('utf-16'))
        self.assertTrue(stream.charEncoding[0] in ['utf-16-le', 'utf-16-be'], stream.charEncoding)
        self.assertEqual(len(stream.charsUntil(' ', True)), 1025)

    def test_newlines(self):
        stream = HTMLBinaryInputStreamShortChunk(codecs.BOM_UTF8 + b"a\nbb\r\nccc\rddddxe")
        self.assertEqual(stream.position(), (1, 0))
        self.assertEqual(stream.charsUntil('c'), "a\nbb\n")
        self.assertEqual(stream.position(), (3, 0))
        self.assertEqual(stream.charsUntil('x'), "ccc\ndddd")
        self.assertEqual(stream.position(), (4, 4))
        self.assertEqual(stream.charsUntil('e'), "x")
        self.assertEqual(stream.position(), (4, 5))

    def test_newlines2(self):
        size = HTMLUnicodeInputStream._defaultChunkSize
        stream = HTMLInputStream("\r" * size + "\n")
        self.assertEqual(stream.charsUntil('x'), "\n" * size)

    def test_position(self):
        stream = HTMLBinaryInputStreamShortChunk(codecs.BOM_UTF8 + b"a\nbb\nccc\nddde\nf\ngh")
        self.assertEqual(stream.position(), (1, 0))
        self.assertEqual(stream.charsUntil('c'), "a\nbb\n")
        self.assertEqual(stream.position(), (3, 0))
        stream.unget("\n")
        self.assertEqual(stream.position(), (2, 2))
        self.assertEqual(stream.charsUntil('c'), "\n")
        self.assertEqual(stream.position(), (3, 0))
        stream.unget("\n")
        self.assertEqual(stream.position(), (2, 2))
        self.assertEqual(stream.char(), "\n")
        self.assertEqual(stream.position(), (3, 0))
        self.assertEqual(stream.charsUntil('e'), "ccc\nddd")
        self.assertEqual(stream.position(), (4, 3))
        self.assertEqual(stream.charsUntil('h'), "e\nf\ng")
        self.assertEqual(stream.position(), (6, 1))

    def test_position2(self):
        stream = HTMLUnicodeInputStreamShortChunk("abc\nd")
        self.assertEqual(stream.position(), (1, 0))
        self.assertEqual(stream.char(), "a")
        self.assertEqual(stream.position(), (1, 1))
        self.assertEqual(stream.char(), "b")
        self.assertEqual(stream.position(), (1, 2))
        self.assertEqual(stream.char(), "c")
        self.assertEqual(stream.position(), (1, 3))
        self.assertEqual(stream.char(), "\n")
        self.assertEqual(stream.position(), (2, 0))
        self.assertEqual(stream.char(), "d")
        self.assertEqual(stream.position(), (2, 1))

def buildTestSuite():
    return unittest.defaultTestLoader.loadTestsFromName(__name__)

def main():
    buildTestSuite()
    unittest.main()

if __name__ == '__main__':...
_metadata.py
Source:_metadata.py
# This file is generated by objective.metadata
#
# Last update: Mon Nov 23 12:35:40 2015
import objc, sys
if sys.maxsize > 2 ** 32:
    def sel32or64(a, b): return b
else:
    def sel32or64(a, b): return a
if sys.byteorder == 'little':
    def littleOrBig(a, b): return a
else:
    def littleOrBig(a, b): return b
misc = {
}
misc.update({'FSEventStreamContext': objc.createStructType('FSEventStreamContext', b'{FSEventStreamContext=l^v^?^?^?}', [])})
constants = '''$$'''
enums = '''$kFSEventStreamCreateFlagFileEvents@16$kFSEventStreamCreateFlagIgnoreSelf@8$kFSEventStreamCreateFlagMarkSelf@32$kFSEventStreamCreateFlagNoDefer@2$kFSEventStreamCreateFlagNone@0$kFSEventStreamCreateFlagUseCFTypes@1$kFSEventStreamCreateFlagWatchRoot@4$kFSEventStreamEventFlagEventIdsWrapped@8$kFSEventStreamEventFlagHistoryDone@16$kFSEventStreamEventFlagItemChangeOwner@16384$kFSEventStreamEventFlagItemCreated@256$kFSEventStreamEventFlagItemFinderInfoMod@8192$kFSEventStreamEventFlagItemInodeMetaMod@1024$kFSEventStreamEventFlagItemIsDir@131072$kFSEventStreamEventFlagItemIsFile@65536$kFSEventStreamEventFlagItemIsHardlink@1048576$kFSEventStreamEventFlagItemIsLastHardlink@2097152$kFSEventStreamEventFlagItemIsSymlink@262144$kFSEventStreamEventFlagItemModified@4096$kFSEventStreamEventFlagItemRemoved@512$kFSEventStreamEventFlagItemRenamed@2048$kFSEventStreamEventFlagItemXattrMod@32768$kFSEventStreamEventFlagKernelDropped@4$kFSEventStreamEventFlagMount@64$kFSEventStreamEventFlagMustScanSubDirs@1$kFSEventStreamEventFlagNone@0$kFSEventStreamEventFlagOwnEvent@524288$kFSEventStreamEventFlagRootChanged@32$kFSEventStreamEventFlagUnmount@128$kFSEventStreamEventFlagUserDropped@2$kFSEventStreamEventIdSinceNow@18446744073709551615$'''
misc.update({})
functions={'FSEventStreamShow': (b'v^{__FSEventStream=}',), 'FSEventStreamGetLatestEventId': (b'Q^{__FSEventStream=}',), 'FSEventStreamRetain': (b'v^{__FSEventStream=}',), 'FSEventStreamSetDispatchQueue': (b'v^{__FSEventStream=}^{dispatch_queue_s=}',), 'FSEventsCopyUUIDForDevice': (b'^{__CFUUID=}i', '', {'retval': {'already_retained': True}}), 'FSEventStreamSetExclusionPaths': (b'v^{__FSEventStream=}^{__CFArray=}', '', {'retval': {'type': 'Z'}}), 'FSEventStreamScheduleWithRunLoop': (b'v^{__FSEventStream=}^{__CFRunLoop=}^{__CFString=}',), 'FSEventStreamInvalidate': (b'v^{__FSEventStream=}',), 'FSEventStreamStop': (b'v^{__FSEventStream=}',), 'FSEventsPurgeEventsForDeviceUpToEventId': (b'ZiQ',), 'FSEventStreamGetDeviceBeingWatched': (b'i^{__FSEventStream=}',), 'FSEventStreamCopyDescription': (b'^{__CFString=}^{__FSEventStream=}', '', {'retval': {'already_retained': True}}), 'FSEventStreamCopyPathsBeingWatched': (b'^{__CFArray=}^{__FSEventStream=}', '', {'retval': {'already_cfretained': True}}), 'FSEventStreamUnscheduleFromRunLoop': (b'v^{__FSEventStream=}^{__CFRunLoop=}^{__CFString=}',), 'FSEventStreamRelease': (b'v^{__FSEventStream=}',), 'FSEventStreamStart': (b'Z^{__FSEventStream=}',), 'FSEventStreamFlushSync': (b'v^{__FSEventStream=}',), 'FSEventsGetLastEventIdForDeviceBeforeTime': (b'Qid',), 'FSEventStreamFlushAsync': (b'Q^{__FSEventStream=}',), 'FSEventsGetCurrentEventId': (b'Q',)}
misc.update({'FSEventStreamRef': objc.createOpaquePointerType('FSEventStreamRef', b'^{__FSEventStream=}')})
expressions = {}...
Using AI Code Generation
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const context = await browser.newContext();
  const page = await context.newPage();
  // The page.goto() call was stripped from the original snippet; navigate before taking the screenshot.
  // page.screenshot() has no `stream` option; it writes to `path` and also resolves with a Buffer.
  await page.screenshot({ path: 'image.png' });
  await browser.close();
})();
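If the screenshot really must pass through a Node stream, as the snippet above seems to intend, a minimal sketch is to wrap the Buffer that page.screenshot() resolves with in a Readable and pipe it into any writable stream; the output name screenshot-stream.png is just a placeholder.
const { chromium } = require('playwright');
const fs = require('fs');
const { Readable } = require('stream');

(async () => {
  const browser = await chromium.launch();
  const page = await (await browser.newContext()).newPage();
  // page.screenshot() resolves with the PNG bytes as a Buffer.
  const buffer = await page.screenshot();
  // Wrap the buffer in a one-chunk Readable and pipe it into a write stream (placeholder path).
  const out = fs.createWriteStream('screenshot-stream.png');
  Readable.from([buffer]).pipe(out);
  await new Promise((resolve) => out.on('finish', resolve));
  await browser.close();
})();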
Using AI Code Generation
const { chromium } = require("playwright");
const fs = require("fs");
(async () => {
  const browser = await chromium.launch();
  const context = await browser.newContext();
  const page = await context.newPage();
  // Playwright has no page.screenshotStream(); page.screenshot() resolves with a Buffer instead.
  const buffer = await page.screenshot();
  const fileStream = fs.createWriteStream("screenshot.png");
  fileStream.end(buffer);
  await browser.close();
})();
Using AI Code Generation
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  // Video is only recorded when the context is created with recordVideo.
  const context = await browser.newContext({ recordVideo: { dir: 'videos/' } });
  const page = await context.newPage();
  // The page.goto() call was stripped from the original snippet; navigate here before recording anything useful.
  await page.waitForTimeout(3000);
  // page.video() has no createStream(); close the page, then save the finished recording.
  await page.close();
  await page.video().saveAs('video.webm');
  await browser.close();
})();
Using AI Code Generation
const playwright = require('playwright');
(async () => {
  const browser = await playwright.chromium.launch();
  const context = await browser.newContext();
  const page = await context.newPage();
  // The page.goto() call was stripped from the original snippet; navigate to a page that serves a <video> element first.
  await page.waitForSelector('video');
  const video = await page.$('video');
  // The remainder of the original snippet was truncated; close the browser so the script exits cleanly.
  await browser.close();
})();
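As a follow-up that is not part of the original snippet, one way to inspect a located <video> element is elementHandle.evaluate(), which runs a function against the element inside the page; the inline markup and the movie.webm source below are placeholders used only to keep the sketch self-contained.
const playwright = require('playwright');

(async () => {
  const browser = await playwright.chromium.launch();
  const page = await (await browser.newContext()).newPage();
  // Self-contained stand-in for a real page: inject a <video> element directly.
  await page.setContent('<video src="movie.webm" controls></video>');
  const video = await page.waitForSelector('video', { state: 'attached' });
  // evaluate() runs in the page against this element and can return JSON-serializable data.
  const info = await video.evaluate((el) => ({
    src: el.currentSrc,
    paused: el.paused,
    readyState: el.readyState,
  }));
  console.log(info);
  await browser.close();
})();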
Using AI Code Generation
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  const context = await browser.newContext();
  const page = await context.newPage();
  page.on('request', (request) => {
    console.log('Request', request.url());
  });
  page.on('requestfinished', (request) => {
    console.log('Request Finished', request.url());
  });
  page.on('requestfailed', (request) => {
    console.log('Request Failed', request.url());
  });
  // The page.goto() call was stripped from the original snippet; without navigation no requests are logged.
  await page.screenshot({ path: 'google.png' });
  await browser.close();
})();
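Beyond passively listening to request events, traffic can also be intercepted. A minimal sketch, not taken from the original snippet, using page.route() to log every request and block images while continuing everything else; the https://example.com target is a placeholder.
const { chromium } = require('playwright');

(async () => {
  const browser = await chromium.launch();
  const page = await (await browser.newContext()).newPage();
  // Match every request, log it, and abort image requests while letting the rest through.
  await page.route('**/*', (route) => {
    const request = route.request();
    console.log(request.method(), request.resourceType(), request.url());
    if (request.resourceType() === 'image') {
      return route.abort();
    }
    return route.continue();
  });
  await page.goto('https://example.com'); // placeholder navigation target
  await browser.close();
})();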
Using AI Code Generation
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  // Element handles have no stream() method; record the whole tab via recordVideo instead.
  const context = await browser.newContext({ recordVideo: { dir: 'videos/' } });
  const page = await context.newPage();
  // The page.goto() call was stripped from the original snippet; navigate to the player page first.
  await page.waitForSelector('button#button[aria-label="Play (k)"]');
  await page.click('button#button[aria-label="Play (k)"]');
  await page.waitForSelector('button#button[aria-label="Pause (k)"]');
  // Let the video play for a while before stopping the recording.
  await page.waitForTimeout(3000);
  await page.close();
  // Playwright records WebM; saving the file with an .mp4 extension would not transcode it.
  await page.video().saveAs('video.webm');
  await browser.close();
})();
Using AI Code Generation
const path = require('path');
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  // @playwright/test does not export a Stream class, and browser.newContext() has no `video`
  // option; with the library API, recording is enabled through recordVideo.
  const context = await browser.newContext({
    recordVideo: { dir: path.join(__dirname, 'videos') },
  });
  const page = await context.newPage();
  await page.screenshot({ path: `example.png` });
  await page.close();
  // The recording is WebM; copy it next to the script once the page is closed.
  await page.video().saveAs(path.join(__dirname, 'output.webm'));
  console.log('Video saved');
  await browser.close();
})();
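The 'retain-on-failure' value the original snippet reaches for does exist, but it belongs to the @playwright/test runner configuration rather than to browser.newContext(). A minimal sketch of a playwright.config.js using it; the file name follows the runner's convention and the rest of the config is assumed.
// playwright.config.js — video handling delegated to the test runner, no manual streams needed.
const { defineConfig } = require('@playwright/test');

module.exports = defineConfig({
  use: {
    // Keep recordings only for failing tests; videos land under test-results/.
    video: 'retain-on-failure',
  },
});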
Using AI Code Generation
const { chromium } = require('playwright');
const fs = require('fs');
(async () => {
  const browser = await chromium.launch();
  const context = await browser.newContext();
  const page = await context.newPage();
  // The original navigation URL was stripped; 'https://example.com' is a placeholder.
  const response = await page.goto('https://example.com');
  // response.body() resolves with a Buffer, not a web stream, so there is no pipeTo();
  // write the buffer out through a Node write stream instead.
  const stream = fs.createWriteStream('example.html');
  stream.end(await response.body());
  await browser.close();
})();
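For data that genuinely arrives as a stream, Playwright's Download API exposes a Node readable. A minimal sketch, not from the original snippets, that pipes whatever a download link produces into a file; the navigation target, the a#download-link selector, and the fallback file name are placeholders.
const { chromium } = require('playwright');
const fs = require('fs');

(async () => {
  const browser = await chromium.launch();
  const page = await (await browser.newContext({ acceptDownloads: true })).newPage();
  await page.goto('https://example.com'); // placeholder: use a page with a real download link
  const [download] = await Promise.all([
    page.waitForEvent('download'),
    page.click('a#download-link'), // placeholder selector
  ]);
  // download.createReadStream() resolves with a Node Readable for the downloaded bytes.
  const readable = await download.createReadStream();
  const out = fs.createWriteStream(download.suggestedFilename() || 'download.bin');
  readable.pipe(out);
  await new Promise((resolve) => out.on('finish', resolve));
  await browser.close();
})();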
LambdaTest’s Playwright tutorial gives a broader view of the Playwright automation framework, its distinctive features, and practical use cases, with examples that deepen your understanding of Playwright testing. It offers end-to-end guidance, from installing the Playwright framework through best practices and advanced concepts.