Best Python code snippet using localstack_python
farmfix.py
Source: farmfix.py
#!/usr/bin/env python3
'''
This will cancel jobs and attempt to resubmit them where they were left off. This is mainly meant to fix any jobs
that are running on known bad nodes.
'''
import os
import sys
sys.path.append(os.environ['BEACON_ANALYSIS_DIR'])
import subprocess
import time
import numpy
import yaml
from pprint import pprint

def parseJobIDs(partition, user):
    '''
    Returns a structured array of job info for the set of jobs currently queued for user on partition.
    '''
    structured_dtype = numpy.dtype([('jobname', numpy.unicode_, 16), ('run', int), ('jobtype', numpy.unicode_, 16), ('jobid', int), ('node_reason', numpy.unicode_, 16)])
    out_array = numpy.array([], dtype=structured_dtype)
    text = subprocess.check_output(['squeue', '--format="%.18i split %.30j split %R"', '--partition=%s'%partition, '--user=%s'%user]).decode('utf-8')
    for line in text.replace(' ', '').replace('"', '').split('\n'):
        if 'JOBID' in line or len(line) == 0:
            continue
        try:
            # import pdb; pdb.set_trace()
            jobid = int(line.split('split')[0])
            jobname = str(line.split('split')[1])
            run = int(''.join(filter(str.isdigit, jobname)))
            jobtype = jobname.replace('bcn', '').replace(str(run), '')
            node_reason = str(line.split('split')[2])
            a = numpy.array([(jobname, run, jobtype, jobid, node_reason)], dtype=structured_dtype)
            out_array = numpy.append(out_array, a)
        except Exception as e:
            print(e)
            import pdb; pdb.set_trace()
    return out_array

if __name__ == "__main__":
    ###------------###
    ### Parameters ###
    ###------------###
    debug = False  # Disables actually sending commands to bash
    username = 'dsouthall'
    partition = 'broadwl'
    deploy_index = '/home/dsouthall/Projects/Beacon/beacon/config/september_2021_minimized_calibration.json'
    bad_node_numbers = [15, 227]
    bad_node_string = "--exclude=midway2-%s"%str(['{:04d}'.format(node) for node in bad_node_numbers]).replace("'", "").replace(' ', '')
    bad_node_list = ["midway2-{:04d}".format(node) for node in bad_node_numbers]

    ###--------###
    ### Script ###
    ###--------###
    out_array = parseJobIDs(partition, username)
    expected_jobname_order = ['ss', '', 'hv', 'all']  # 'bcn%i%s'%(run, expected_jobname_order[i])
    flagged_runs = numpy.unique(out_array['run'][numpy.isin(out_array['node_reason'], bad_node_list)])
    print('Number of flagged runs = ', len(flagged_runs))
    print(flagged_runs)
    print('Continue?')
    import pdb; pdb.set_trace()
    print('Are you sure?')
    import pdb; pdb.set_trace()

    # Execute each script, assuming they are dependent on order.
    first = os.path.join(os.environ['BEACON_ANALYSIS_DIR'], 'tools', 'sine_subtract_cache.py')
    second = os.path.join(os.environ['BEACON_ANALYSIS_DIR'], 'analysis', 'all_analysis_part1.sh')
    third = os.path.join(os.environ['BEACON_ANALYSIS_DIR'], 'analysis', 'all_analysis_part2.sh')

    for run in flagged_runs:
        print('\nRun %i'%run)

        jobs_to_run = out_array[out_array['run'] == run]['jobtype']
        jobname = 'bcn%i'%run
        past_job_id = None
        for index in range(len(expected_jobname_order)):
            current_entry = out_array[numpy.logical_and(out_array['run'] == run, out_array['jobtype'] == expected_jobname_order[index])]
            if len(current_entry) == 1:
                current_entry = current_entry[0]
            if index == 0 and expected_jobname_order[index] in jobs_to_run:
                cancel_command = 'scancel %i'%(current_entry['jobid'])
                print('Cancelling current job:')
                print(cancel_command)
                if debug == False:
                    # print('Is this okay?')
                    # import pdb; pdb.set_trace()
                    subprocess.Popen(cancel_command.split(' '))
                # Prepare sine subtraction
                batch = 'sbatch --partition=%s %s --job-name=%s --time=12:00:00 '%(partition, bad_node_string, jobname + 'ss')
                command = first + ' %i'%(run)
                command_queue = batch + command
                # Submit sine subtraction and get the jobid
                print(command_queue)
                if debug == False:
                    past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
            elif index == 1 and expected_jobname_order[index] in jobs_to_run:
                cancel_command = 'scancel %i'%(current_entry['jobid'])
                print('Cancelling current job:')
                print(cancel_command)
                if debug == False:
                    # print('Is this okay?')
                    # import pdb; pdb.set_trace()
                    subprocess.Popen(cancel_command.split(' '))
                if past_job_id is not None:
                    # Prepare non-map analysis
                    batch = 'sbatch --partition=%s %s --job-name=%s --time=36:00:00 --dependency=afterok:%i '%(partition, bad_node_string, jobname, past_job_id)
                    command = '%s %i'%(second, run)
                    command_queue = batch + command
                    # Submit non-map analysis and get the jobid
                    print(command_queue)
                    if debug == False:
                        past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
                else:
                    # Prepare non-map analysis
                    batch = 'sbatch --partition=%s %s --job-name=%s --time=36:00:00 '%(partition, bad_node_string, jobname)
                    command = '%s %i'%(second, run)
                    command_queue = batch + command
                    # Submit non-map analysis and get the jobid
                    print(command_queue)
                    if debug == False:
                        past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
            elif index == 2 and expected_jobname_order[index] in jobs_to_run:
                cancel_command = 'scancel %i'%(current_entry['jobid'])
                print('Cancelling current job:')
                print(cancel_command)
                if debug == False:
                    # print('Is this okay?')
                    # import pdb; pdb.set_trace()
                    subprocess.Popen(cancel_command.split(' '))
                if past_job_id is not None:
                    # Prepare maps for H and V pol job
                    batch = 'sbatch --partition=%s %s --job-name=%s --time=36:00:00 --dependency=afterok:%i '%(partition, bad_node_string, jobname+'hv', past_job_id)
                    command = '%s %i %s %s'%(third, run, deploy_index, 'both')
                    command_queue = batch + command
                    # Submit hpol job and get the jobid to then submit vpol with dependency
                    print(command_queue)
                    if debug == False:
                        past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
                else:
                    # Prepare maps for H and V pol job
                    batch = 'sbatch --partition=%s %s --job-name=%s --time=36:00:00 '%(partition, bad_node_string, jobname+'hv')
                    command = '%s %i %s %s'%(third, run, deploy_index, 'both')
                    command_queue = batch + command
                    # Submit hpol job and get the jobid to then submit vpol with dependency
                    print(command_queue)
                    if debug == False:
                        past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
            elif index == 3 and expected_jobname_order[index] in jobs_to_run:
                cancel_command = 'scancel %i'%(current_entry['jobid'])
                print('Cancelling current job:')
                print(cancel_command)
                if debug == False:
                    # print('Is this okay?')
                    # import pdb; pdb.set_trace()
                    subprocess.Popen(cancel_command.split(' '))
                if past_job_id is not None:
                    # The 'all' job must run after 'hv', because "best map" selection is called when 'all' runs, so 'hv' must already be done.
                    # Prepare 'all' job
                    batch = 'sbatch --partition=%s %s --job-name=%s --time=36:00:00 --dependency=afterok:%i '%(partition, bad_node_string, jobname+'all', past_job_id)
                    command = '%s %i %s %s'%(third, run, deploy_index, 'all')
                    command_queue = batch + command
                    # Submit 'all' job
                    print(command_queue)
                    if debug == False:
                        past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
                else:
                    # The 'all' job must run after 'hv', because "best map" selection is called when 'all' runs, so 'hv' must already be done.
                    # Prepare 'all' job
                    batch = 'sbatch --partition=%s %s --job-name=%s --time=36:00:00 '%(partition, bad_node_string, jobname+'all')
                    command = '%s %i %s %s'%(third, run, deploy_index, 'all')
                    command_queue = batch + command
                    # Submit 'all' job
                    print(command_queue)
                    if debug == False:
                        past_job_id = int(subprocess.check_output(command_queue.split(' ')).decode("utf-8").replace('Submitted batch job ', '').replace('\n', ''))
        if past_job_id is not None:...
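The core pattern in the script above is to cancel a queued Slurm job with scancel, then resubmit its pipeline with --exclude for the bad nodes and --dependency=afterok chaining, capturing each new job id from sbatch's output. The following is a minimal, standalone sketch of that pattern; the helper names (submit, cancel), the run number, and the job id in the usage block are hypothetical and not part of farmfix.py, and it assumes the standard sbatch success message "Submitted batch job <id>".

import subprocess

def submit(batch_args, script, script_args, dependency=None):
    """Submit a Slurm job, optionally chained after another job, and return its job id.

    Assumes sbatch prints 'Submitted batch job <id>' on success.
    """
    cmd = ['sbatch'] + list(batch_args)
    if dependency is not None:
        cmd.append('--dependency=afterok:%i' % dependency)
    cmd.append(script)
    cmd.extend(str(a) for a in script_args)
    out = subprocess.check_output(cmd).decode('utf-8')
    return int(out.strip().split()[-1])  # the job id is the last token of the message

def cancel(jobid):
    """Cancel a queued or running Slurm job."""
    subprocess.check_call(['scancel', str(jobid)])

# Hypothetical usage: cancel a job stuck on a bad node and resubmit a two-step chain
# on the same partition while excluding the bad nodes.
if __name__ == '__main__':
    bad_nodes = '--exclude=midway2-0015,midway2-0227'
    common = ['--partition=broadwl', bad_nodes]
    cancel(1234567)  # hypothetical job id reported by squeue
    first_id = submit(common + ['--job-name=bcn5911ss', '--time=12:00:00'],
                      'sine_subtract_cache.py', [5911])
    submit(common + ['--job-name=bcn5911', '--time=36:00:00'],
           'all_analysis_part1.sh', [5911], dependency=first_id)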
FileDialog.py
Source: FileDialog.py
...
    def get_selection(self):
        file = self.selection.get()
        file = os.path.expanduser(file)
        return file

    def cancel_command(self, event=None):
        self.quit()

    def set_filter(self, dir, pat):
        if not os.path.isabs(dir):
            try:
                pwd = os.getcwd()
            except os.error:
                pwd = None
            if pwd:
                dir = os.path.join(pwd, dir)
                dir = os.path.normpath(dir)
        self.filter.delete(0, END)
        self.filter.insert(END, os.path.join(dir or os.curdir, pat or "*"))

    def set_selection(self, file):
        self.selection.delete(0, END)
...
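These methods belong to the legacy FileDialog class in the standard library's tkinter.filedialog module: cancel_command is the Cancel-button callback, while set_filter and set_selection manage the filter and selection entry widgets. A minimal sketch of how such a dialog is typically driven, assuming a display is available (go() runs the dialog's event loop and returns None when cancel_command fires):

import os
from tkinter import Tk
from tkinter.filedialog import LoadFileDialog

root = Tk()
root.withdraw()                      # keep the bare root window hidden
dialog = LoadFileDialog(root, title="Open file")
# go() blocks until OK or Cancel; Cancel makes it return None.
selected = dialog.go(dir_or_file=os.curdir, pattern="*.py")
if selected is None:
    print("Dialog was cancelled")
else:
    print("Selected:", selected)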
proxy.py
Source: proxy.py
from abc import ABC, abstractmethod
from ja.common.proxy.proxy import SingleMessageProxy
from ja.common.message.base import Response
from ja.common.proxy.ssh import SSHConnection, ISSHConnection, SSHConfig
from ja.user.message.add import AddCommand
from ja.user.message.cancel import CancelCommand
from ja.user.message.query import QueryCommand


class IUserServerProxy(SingleMessageProxy, ABC):
    """
    Interface for the proxy for the central server used on the user client.
    """
    @abstractmethod
    def add_job(self, add_config: AddCommand) -> Response:
        """!
        @param add_config: Config specifying parameters for adding a job.
        @return: The Response from the Server.
        """
    @abstractmethod
    def cancel_job(self, cancel_command: CancelCommand) -> Response:
        """!
        @param cancel_command: Config specifying parameters for cancelling a
        job.
        @return: The Response from the Server.
        """
    @abstractmethod
    def query(self, query_command: QueryCommand) -> Response:
        """!
        @param query_command: Config specifying parameters for querying a job.
        @return: The Response from the Server.
        """


class UserServerProxyBase(IUserServerProxy, ABC):
    """
    Base class for the proxy for the central server used on the user client.
    """
    def add_job(self, add_command: AddCommand) -> Response:
        connection = self._get_ssh_connection(add_command.config.ssh_config)
        return connection.send_command(add_command)

    def cancel_job(self, cancel_command: CancelCommand) -> Response:
        connection = self._get_ssh_connection(cancel_command.config.ssh_config)
        return connection.send_command(cancel_command)

    def query(self, query_command: QueryCommand) -> Response:
        connection = self._get_ssh_connection(query_command.config.ssh_config)
        return connection.send_command(query_command)


class UserServerProxy(UserServerProxyBase):
    """
    Implementation for the proxy for the central server used on the user client.
    """
    def __init__(self, ssh_config: SSHConfig, remote_module: str = "/tmp/jobadder-server.socket",
                 command_string: str = "ja-remote %s"):
        super().__init__(ssh_config=ssh_config)
        self._remote_module = remote_module
        self._command_string = command_string

    def _get_ssh_connection(self, ssh_config: SSHConfig) -> ISSHConnection:
        return SSHConnection(...
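UserServerProxyBase illustrates a common single-message proxy pattern: each user-facing call (add_job, cancel_job, query) opens a connection derived from the command's own config and forwards the command as one message, returning the server's response. The standalone sketch below mirrors that structure with hypothetical stand-in classes (CancelMessage, FakeConnection, ServerProxy); it is not the jobadder API, only an illustration of the pattern under those assumptions.

from abc import ABC, abstractmethod
from dataclasses import dataclass

# Hypothetical stand-ins for the ja.* classes referenced in proxy.py.
@dataclass
class CancelMessage:
    job_id: str
    ssh_host: str  # plays the role of cancel_command.config.ssh_config

class Connection(ABC):
    @abstractmethod
    def send_command(self, message) -> dict:
        """Deliver a single message and return the server's response."""

class FakeConnection(Connection):
    def __init__(self, host: str):
        self._host = host

    def send_command(self, message) -> dict:
        # A real implementation would serialize the message and send it over SSH.
        return {"host": self._host, "cancelled": message.job_id}

class ServerProxy:
    """Mirrors UserServerProxyBase: one connection per call, one message per connection."""
    def cancel_job(self, cancel_command: CancelMessage) -> dict:
        connection = self._get_connection(cancel_command.ssh_host)
        return connection.send_command(cancel_command)

    def _get_connection(self, host: str) -> Connection:
        return FakeConnection(host)

if __name__ == "__main__":
    proxy = ServerProxy()
    print(proxy.cancel_job(CancelMessage(job_id="42", ssh_host="cluster.example.org")))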