How to use the no_such_bucket method in localstack

Best Python code snippets using localstack_python
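In the snippets below, no_such_bucket shows up both as a bucket-name string passed to cache helpers and as the variable bound to S3's NoSuchBucket error. A minimal sketch of that error-handling pattern with boto3 follows; the bucket name is a placeholder, and the client is assumed to already be configured for LocalStack or AWS (see the endpoint sketch after the last snippet).

import boto3

# Assumes credentials and endpoint are already configured for LocalStack or AWS.
s3 = boto3.client('s3')

try:
    # placeholder bucket name that should not exist
    s3.get_bucket_location(Bucket='no-such-bucket')
except s3.exceptions.NoSuchBucket as no_such_bucket:
    # modeled botocore exception; .response holds the parsed error document
    print(no_such_bucket.response['Error']['Code'])  # 'NoSuchBucket'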

test_cache.py

Source: test_cache.py (GitHub)


1"""2Blizzard API CLI - TestCache class.3Vaughn Kottler 01/09/194"""5# built-in6import argparse7import os8import stat9import tempfile10import unittest11# internal12from blizzard_api.test.resources import get_item13# payload14from blizzard_api.cache import Cache, CacheError15#pylint:disable=too-few-public-methods16class DummyObject:17 """ For creating data that can't be serialized into JSON. """18class TestCache(unittest.TestCase):19 """ Testing the disk-based JSON cache. """20 def setUp(self):21 """ Create a cache object to be used by test cases. """22 self.cache = Cache(get_item("test_cache"))23 def tearDown(self):24 """ Restore test cache to its default state. """25 # make sure dummy bucket doesn't get deleted permanently26 if not self.cache.is_bucket("empty_bucket"):27 self.cache.add_bucket("empty_bucket")28 def test_is_bucket(self):29 """ Verify that bucket detection logic works. """30 self.assertTrue(self.cache.is_bucket("empty_bucket"))31 self.assertFalse(self.cache.is_bucket("bad_bucket"))32 self.cache.add_bucket("test_bucket")33 self.assertTrue(self.cache.is_bucket("test_bucket"))34 self.cache.remove_bucket("test_bucket")35 self.assertFalse(self.cache.is_bucket("test_bucket"))36 def test_remove_all_buckets(self):37 """ Verify that clearing the cache works. """38 self.cache.add_bucket("test_bucket1")39 self.cache.add_bucket("test_bucket2")40 self.cache.remove_all_buckets()41 self.assertFalse(self.cache.is_bucket("empty_bucket"))42 self.assertFalse(self.cache.is_bucket("test_bucket1"))43 self.assertFalse(self.cache.is_bucket("test_bucket2"))44 self.assertFalse(self.cache.get_bucket_names())45 def test_constructor_exceptions(self):46 """ Verify that constructor exceptions trigger when expected. """47 # check that illegal directories can't be created48 root_path = os.path.abspath(os.sep)49 self.assertRaises(argparse.ArgumentTypeError, Cache,50 os.path.join(root_path, "illegal_dir"))51 # make a temporary directory not writeable52 temp_dir_name = tempfile.mkdtemp()53 permissions = os.stat(temp_dir_name).st_mode54 permissions = permissions & ~stat.S_IWUSR55 os.chmod(temp_dir_name, permissions)56 self.assertRaises(argparse.ArgumentTypeError, Cache, temp_dir_name)57 os.rmdir(temp_dir_name)58 def test_general_exceptions(self):59 """60 Verify that exceptions checking state of buckets trigger when expected.61 """62 self.assertRaises(CacheError, self.cache.add_bucket, "empty_bucket")63 self.assertRaises(CacheError, self.cache.get_bucket, "no_such_bucket")64 self.assertRaises(CacheError, self.cache.remove_bucket, "no_such_bucket")65 self.assertRaises(CacheError, self.cache.add_item, "no_such_bucket", {})66 def test_bucket_contents(self):67 """ Verify that manipulating bucket contents works. """68 self.cache.add_bucket("test_bucket")69 # test that data can be written and read back70 item1 = "test String"71 item2 = {"string": item1}72 self.cache.add_item("test_bucket", item1)73 self.cache.add_item("test_bucket", item2)74 cache_contents = self.cache.get_bucket("test_bucket")75 self.assertEqual(cache_contents[0], item1)76 self.assertEqual(cache_contents[1], item2)77 # test that non-serializable data can't be written78 bad_object = DummyObject()79 self.assertRaises(CacheError, self.cache.add_item, "test_bucket", bad_object)...


s3.py

Source: s3.py (GitHub)


import boto3
from botocore.exceptions import ClientError
from bosscat.utils import client_error_code, get_bucket_arn, try_client
NO_SUCH_BUCKET = 'NoSuchBucket'
BUCKET_ALREADY_EXISTS = 'BucketAlreadyExists'
BUCKET_ALREADY_OWNED_BY_YOU = 'BucketAlreadyOwnedByYou'
DEFAULT_CORS_DICT = {
    'CORSRules': [
        {
            'AllowedHeaders': ['Authorization'],
            'AllowedMethods': ['GET'],
            'AllowedOrigins': ['*'],
            'ExposeHeaders': [],
            'MaxAgeSeconds': 3000
        }
    ]
}
def ensure_bucket(bucket_name, region, bucket_policy=None, cors_dict=None):
    client = get_s3_client()
    if region == 'us-east-1':
        region = None
    try:
        if region:
            bucket = client.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={
                    'LocationConstraint': region
                }
            )
        else:
            bucket = client.create_bucket(Bucket=bucket_name)
    except ClientError as ex:
        if client_error_code(ex) != BUCKET_ALREADY_OWNED_BY_YOU:
            raise
    if cors_dict:
        client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_dict)
    if bucket_policy:
        try_client(
            lambda: client.put_bucket_policy(
                Bucket=bucket_name,
                Policy=bucket_policy
            )
        )
def destroy_bucket(bucket_name):
    client = get_s3_client()
    try:
        while True:
            object_list = client.list_objects(Bucket=bucket_name)
            if not object_list.get('Contents'):
                break
            delete_list = [
                {'Key': obj['Key']}
                for obj in object_list['Contents']
            ]
            client.delete_objects(
                Bucket=bucket_name,
                Delete={
                    "Objects": delete_list,
                    "Quiet": True
                }
            )
        client.delete_bucket(Bucket=bucket_name)
    except ClientError as ex:
        if client_error_code(ex) != NO_SUCH_BUCKET:
            raise ex
def get_bucket_region(bucket_name):
    client = get_s3_client()
    try:
        response = client.get_bucket_location(Bucket=bucket_name)
        region = response.get('LocationConstraint')
        return region or 'us-east-1'
    except ClientError as ex:
        if client_error_code(ex) != NO_SUCH_BUCKET:
            raise ex
    return None
def get_s3_client():
    return boto3.client(
        's3',
        config=boto3.session.Config(signature_version='s3v4')
    )
def upload_local_file_to_bucket(local_filename, bucket_name, key):
    client = get_s3_client()
    try_client(
        lambda: client.upload_file(local_filename, bucket_name, key)
    )
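The client_error_code helper above comes from bosscat.utils and is not shown in the snippet. Botocore exposes the service error code on ClientError under response['Error']['Code'], so a comparable helper could look roughly like the sketch below (an assumption, not the bosscat implementation).

from botocore.exceptions import ClientError

def client_error_code(ex):
    """Return the service error code (e.g. 'NoSuchBucket') from a ClientError."""
    if isinstance(ex, ClientError):
        return ex.response.get('Error', {}).get('Code', '')
    return ''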


nestor_fileupload.py

Source: nestor_fileupload.py (GitHub)


### COMP264 - Cloud Machine Learning
### Nestor Romero 301133331
### Assignment 1
import boto3
import pathlib
import logging
from datetime import datetime
# Set up our logger
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
#create service client
s3 = boto3.resource('s3')
bucket_name = 'content301133330'
try:
    # locate path where code is executing
    local_path = pathlib.Path(__file__).parent.resolve()
    logger.info(f'Folder to lookup files >> {local_path}\n')
    for id in range(1, 4):
        # create filenames dynamically
        filename = f'test_text{id}.txt'
        logger.info(f'{filename} started uploading... (start: {datetime.now()})')
        result = s3.meta.client.upload_file(f'{local_path}/{filename}', bucket_name, filename)
        logger.info(f'{filename} is uploaded... (end: {datetime.now()})\n')
    logger.info('End of process\n')
except s3.meta.client.exceptions.NoSuchBucket as no_such_bucket:
    logging.error(no_such_bucket.response)
except Exception as ex:
    ...
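As written, the script targets real AWS. To exercise the NoSuchBucket branch against LocalStack instead, the resource can be pointed at LocalStack's edge endpoint; the URL and dummy credentials below assume a default local setup and are not part of the original script.

import boto3

# Assumed defaults for a local LocalStack setup; adjust to your environment.
s3 = boto3.resource(
    's3',
    endpoint_url='http://localhost:4566',  # LocalStack edge endpoint
    aws_access_key_id='test',              # LocalStack accepts dummy credentials
    aws_secret_access_key='test',
    region_name='us-east-1',
)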


