Best Python code snippet using localstack_python
s3_listener.py
Source: s3_listener.py
...
        match = re.match(regex, action_string)
        if match:
            return match
    return False

def filter_rules_match(filters, object_path):
    """ check whether the given object path matches all of the given filters """
    filters = filters or {}
    key_filter = filters.get('S3Key', {})
    for rule in key_filter.get('FilterRule', []):
        if rule['Name'] == 'prefix':
            if not prefix_with_slash(object_path).startswith(prefix_with_slash(rule['Value'])):
                return False
        elif rule['Name'] == 'suffix':
            if not object_path.endswith(rule['Value']):
                return False
        else:
            LOGGER.warning('Unknown filter name: "%s"' % rule['Name'])
    return True

def prefix_with_slash(s):
    return s if s[0] == '/' else '/%s' % s

def get_event_message(event_name, bucket_name, file_name='testfile.txt', file_size=1024):
    # Based on: http://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
    return {
        'Records': [{
            'eventVersion': '2.0',
            'eventSource': 'aws:s3',
            'awsRegion': DEFAULT_REGION,
            'eventTime': timestamp(format=TIMESTAMP_FORMAT_MILLIS),
            'eventName': event_name,
            'userIdentity': {
                'principalId': 'AIDAJDPLRKLG7UEXAMPLE'
            },
            'requestParameters': {
                'sourceIPAddress': '127.0.0.1'  # TODO determine real source IP
            },
            'responseElements': {
                'x-amz-request-id': short_uid(),
                'x-amz-id-2': 'eftixk72aD6Ap51TnqcoF8eFidJG9Z/2'  # Amazon S3 host that processed the request
            },
            's3': {
                's3SchemaVersion': '1.0',
                'configurationId': 'testConfigRule',
                'bucket': {
                    'name': bucket_name,
                    'ownerIdentity': {
                        'principalId': 'A3NL1KOZZKExample'
                    },
                    'arn': 'arn:aws:s3:::%s' % bucket_name
                },
                'object': {
                    'key': file_name,
                    'size': file_size,
                    'eTag': 'd41d8cd98f00b204e9800998ecf8427e',
                    'versionId': '096fKKXTRTtl3on89fVO.nfljtsv6qko',
                    'sequencer': '0055AED6DCD90281E5'
                }
            }
        }]
    }

def queue_url_for_arn(queue_arn):
    sqs_client = aws_stack.connect_to_service('sqs')
    parts = queue_arn.split(':')
    return sqs_client.get_queue_url(QueueName=parts[5],
        QueueOwnerAWSAccountId=parts[4])['QueueUrl']

def send_notifications(method, bucket_name, object_path):
    for bucket, config in iteritems(S3_NOTIFICATIONS):
        if bucket == bucket_name:
            action = {'PUT': 'ObjectCreated', 'DELETE': 'ObjectRemoved'}[method]
            # TODO: support more detailed methods, e.g., DeleteMarkerCreated
            # http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html
            api_method = {'PUT': 'Put', 'DELETE': 'Delete'}[method]
            event_name = '%s:%s' % (action, api_method)
            if (event_type_matches(config['Event'], action, api_method) and
                    filter_rules_match(config.get('Filter'), object_path)):
                # send notification
                message = get_event_message(
                    event_name=event_name, bucket_name=bucket_name,
                    file_name=urlparse.urlparse(object_path[1:]).path
                )
                message = json.dumps(message)
                if config.get('Queue'):
                    sqs_client = aws_stack.connect_to_service('sqs')
                    try:
                        queue_url = queue_url_for_arn(config['Queue'])
                        sqs_client.send_message(QueueUrl=queue_url, MessageBody=message)
                    except Exception as e:
                        LOGGER.warning('Unable to send notification for S3 bucket "%s" to SQS queue "%s": %s' %
                            (bucket_name, config['Queue'], e))
...
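To see what these helpers do in isolation, here is a minimal sketch of how filter_rules_match and get_event_message could be exercised outside LocalStack. The constants DEFAULT_REGION and TIMESTAMP_FORMAT_MILLIS and the helpers timestamp and short_uid are stand-ins invented for this example (the real module imports them from LocalStack's utilities), so treat them as assumptions rather than the library's actual API.

import json
import uuid
from datetime import datetime

# Hypothetical stand-ins for names the excerpt above imports elsewhere in LocalStack;
# they exist only so this sketch runs standalone.
DEFAULT_REGION = 'us-east-1'
TIMESTAMP_FORMAT_MILLIS = '%Y-%m-%dT%H:%M:%S.%fZ'

def timestamp(format=TIMESTAMP_FORMAT_MILLIS):
    return datetime.utcnow().strftime(format)

def short_uid():
    return str(uuid.uuid4())[:8]

# A filter shaped like the 'Filter' entry of an S3 notification configuration
notification_filter = {
    'S3Key': {
        'FilterRule': [
            {'Name': 'prefix', 'Value': 'uploads/'},
            {'Name': 'suffix', 'Value': '.jpg'}
        ]
    }
}

# Object paths arrive with a leading slash; prefix_with_slash normalizes the rule value the same way
print(filter_rules_match(notification_filter, '/uploads/cat.jpg'))   # True
print(filter_rules_match(notification_filter, '/logs/server.log'))   # False

# Build the event payload that would be serialized and pushed to SQS
event = get_event_message('ObjectCreated:Put', 'my-bucket',
                          file_name='uploads/cat.jpg', file_size=42)
print(json.dumps(event, indent=2))

The prefix rule only matches because prefix_with_slash prepends '/' to both the rule value and the object path before comparing, which mirrors the leading-slash paths the listener receives for incoming requests.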