Best Python code snippet using localstack_python
mongo_s3.py
Source: mongo_s3.py
'''
Script to create a mongodump and store it in S3.
'''
import argparse
import subprocess
import time
import datetime

import s3_multipart_upload


def mongodump_full(tdir, exp_dir):
    '''Perform a full mongodump (with oplog) into tdir/exp_dir.'''
    mongodump_cmd = "mongodump --oplog -o " + tdir + "/" + exp_dir
    subprocess.check_output(mongodump_cmd, shell=True)


def mongodump_oplog(tdir, exp_dir):
    '''Perform a mongodump of the oplog only (local.oplog.rs).'''
    mongodump_cmd = "mongodump -d local -c oplog.rs -o " + tdir + "/" + exp_dir
    subprocess.check_output(mongodump_cmd, shell=True)


def create_tar(tdir, exp_dir):
    '''Create a bzip2-compressed tar of the dump directory.'''
    tar_fname = exp_dir + ".tbz2"
    tar_cmd = "cd " + tdir + ";tar -cjf " + tar_fname + " " + exp_dir
    subprocess.check_output(tar_cmd, shell=True)
    return tar_fname


def store_s3(tdir, name):
    """Store the archive in S3."""
    # Uses the .boto config for credentials
    bucket_name = 'flextrip-db-dumps'
    tnow = datetime.datetime.now()
    day = int(tnow.day)
    # Full dumps from the first week of the month go under monthly/,
    # other full dumps under weekly/, and oplog dumps under daily/.
    if "full" in name:
        if 1 <= day <= 7:
            kname = "monthly/"
        else:
            kname = "weekly/"
    else:
        kname = "daily/"
    key_name = kname + name
    fname = tdir + "/" + name
    # print(kname, fname)
    s3_multipart_upload.main(fname, bucket_name, s3_key_name=key_name,
                             use_rr=False, make_public=False)


def cleanup(tdir, name):
    """Remove the backup dir."""
    rm_dir_cmd = "cd " + tdir + ";rm -rf " + name
    subprocess.check_output(rm_dir_cmd, shell=True)


# Main Section
if __name__ == "__main__":
    # Read the script arguments
    parser = argparse.ArgumentParser(
        description='Script to mongodump or oplog dump and store to S3')
    parser.add_argument("-o", action='store_true', help="Perform oplog dump")
    args = parser.parse_args()
    oplog_flag = args.o
    # Main part of the script
    tdir = "/backup"
    cur_time = time.time()
    # Note: %u (ISO weekday) is a glibc extension, not portable to every platform
    timestamp = datetime.datetime.fromtimestamp(cur_time).strftime('%Y-%m-%d-%u-%H-%M-%S')
    # Perform the mongodump
    if not oplog_flag:
        exp_dir = "mongodump_full-" + timestamp
        mongodump_full(tdir, exp_dir)
    else:
        exp_dir = "mongodump_oplog-" + timestamp
        mongodump_oplog(tdir, exp_dir)
    # Create a compressed tar file
    tar_fname = create_tar(tdir, exp_dir)
    # Store the file in S3
    store_s3(tdir, tar_fname)
    # Remove the backup directory...
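The imported helper, s3_multipart_upload, is only visible at its call site: main(fname, bucket_name, s3_key_name=..., use_rr=..., make_public=...). As a rough illustration, here is a minimal sketch of a drop-in helper with that signature written against boto3, whose managed transfer switches to multipart automatically above a size threshold. The original almost certainly used boto 2.x, so treat this body as an assumption, not the actual module.

import boto3
from boto3.s3.transfer import TransferConfig

def main(fname, bucket_name, s3_key_name=None, use_rr=False, make_public=False):
    """Hypothetical stand-in for s3_multipart_upload.main; the signature is
    inferred from the call site above, the implementation is an assumption."""
    s3 = boto3.client("s3")  # credentials resolved from environment/config files
    key = s3_key_name or fname.rsplit("/", 1)[-1]
    extra = {}
    if use_rr:
        extra["StorageClass"] = "REDUCED_REDUNDANCY"
    if make_public:
        extra["ACL"] = "public-read"
    # boto3 performs a multipart upload automatically once the file size
    # crosses multipart_threshold
    config = TransferConfig(multipart_threshold=64 * 1024 * 1024,
                            multipart_chunksize=64 * 1024 * 1024)
    s3.upload_file(fname, bucket_name, key,
                   ExtraArgs=extra or None, Config=config)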
main.py
Source: main.py
...
# (earlier lines of this snippet, including the other imports, are elided in the source)
from StringIO import StringIO


def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()


def s3_multipart_upload(bucket, url, remote_filename):
    from urlparse import urlparse
    r = requests.get(url, stream=True)
    fn_remote = urlparse(remote_filename).path
    fn_remote_full = remote_filename
    print("Multi-Part upload...")
    print("From : %s" % url)
    print("To : %s" % fn_remote_full)
    buffer_size = 10 * 1024 * 1024
    mp = bucket.initiate_multipart_upload(fn_remote)
    num_part = 1
    try:
        # Stream the source URL and upload it to S3 part by part
        for buf in r.iter_content(buffer_size):
            if not buf:
                break
            io = StringIO(buf)
            mp.upload_part_from_file(io, num_part, cb=percent_cb, num_cb=1,
                                     size=len(buf))
            num_part += 1
            io.close()
    except IOError as e:
        mp.cancel_upload()
        raise e
    mp.complete_upload()
    print("")
    return fn_remote_full


def get_s3_working_dir(settings, s3_bucket, path=""):
    ps = settings
    glb_vars = ps.GlobalParam
    remote_path = os.path.normpath(os.path.join(
        s3_bucket.name, 'zetjob', glb_vars['userName'],
        "job%s" % glb_vars['jobId'], "blk%s" % glb_vars['blockId'], path))
    return os.path.join("s3://", remote_path)


def main():
    settings = get_settings_from_file("spec.json")
    print(settings)
    p = settings.Param
    s3_conn = boto.connect_s3(p.AWS_ACCESS_KEY_ID, p.AWS_ACCESS_KEY_SECRET)
    s3_bucket = s3_conn.get_bucket(p.S3_BUCKET)
    remote_filename = get_s3_working_dir(settings, s3_bucket, "OUTPUT_dest_s3/dest_s3")
    remote_filename_full = s3_multipart_upload(s3_bucket, p.SOURCE_URL, remote_filename)
    remote_dir = os.path.dirname(remote_filename_full)
    with open(settings.Output.dest_s3, "w") as f:
        f.write(remote_dir)
    print("Done")


if __name__ == "__main__":
...
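main.py relies on boto 2.x's multipart objects (initiate_multipart_upload / upload_part_from_file / complete_upload). For comparison, a minimal sketch of the same stream-from-URL pattern against boto3's low-level multipart calls might look like the following; bucket, key, and url are placeholders, and note that S3 requires every part except the last to be at least 5 MiB (the 10 MiB buffer above satisfies this).

import boto3
import requests

def s3_multipart_upload_v3(bucket, key, url, part_size=10 * 1024 * 1024):
    """Sketch: stream `url` into s3://bucket/key with boto3 multipart calls.
    Placeholder names; assumes each part before the last is >= 5 MiB."""
    s3 = boto3.client("s3")
    r = requests.get(url, stream=True)
    r.raise_for_status()
    mpu = s3.create_multipart_upload(Bucket=bucket, Key=key)
    parts = []
    num_part = 1
    try:
        for buf in r.iter_content(part_size):
            if not buf:
                break
            resp = s3.upload_part(Bucket=bucket, Key=key, PartNumber=num_part,
                                  UploadId=mpu["UploadId"], Body=buf)
            parts.append({"ETag": resp["ETag"], "PartNumber": num_part})
            num_part += 1
        s3.complete_multipart_upload(Bucket=bucket, Key=key,
                                     UploadId=mpu["UploadId"],
                                     MultipartUpload={"Parts": parts})
    except Exception:
        # Abort so the partial upload does not keep accruing storage charges
        s3.abort_multipart_upload(Bucket=bucket, Key=key, UploadId=mpu["UploadId"])
        raise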
s3_download.py
Source: s3_download.py
from cosmos.lib.ezflow.tool import Tool
# from json_utils import json_split1fastq
from os.path import join


class S3Download(Tool):
    inputs = ['fastq.gz']
    outputs = ['json']
    time_req = 12 * 60
    mem_req = 3000

    def cmd(self, i, s, p):
        return "python {S3_down} {p[gz_path]} bmi-ngs /Output_Cluster/file_prova", {
            'S3_down': join(s['pipes'], 'tools_pipe/s3_utils/s3_multipart_upload.py')}


class S3Upload(Tool):
    # Note: identical to S3Download in the source; both shell out to the
    # same s3_multipart_upload.py script.
    inputs = ['fastq.gz']
    outputs = ['json']
    time_req = 12 * 60
    mem_req = 3000

    def cmd(self, i, s, p):
        return "python {S3_down} {p[gz_path]} bmi-ngs /Output_Cluster/file_prova", {
            'S3_down': join(s['pipes'], 'tools_pipe/s3_utils/s3_multipart_upload.py')}
...
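Each cmd() returns a command template plus a substitution dict, which cosmos renders into the shell command it runs. As a rough illustration of that rendering (the '/pipes' root and the gz_path parameter below are made-up values, not from the source):

# Illustration only: how the (template, substitutions) pair might render.
template = "python {S3_down} {p[gz_path]} bmi-ngs /Output_Cluster/file_prova"
subs = {
    'S3_down': '/pipes/tools_pipe/s3_utils/s3_multipart_upload.py',
    'p': {'gz_path': '/data/sample.fastq.gz'},
}
print(template.format(**subs))
# python /pipes/tools_pipe/s3_utils/s3_multipart_upload.py /data/sample.fastq.gz bmi-ngs /Output_Cluster/file_prova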