Best Python code snippet using ATX
pfam_loader.py
Source:pfam_loader.py
...
Two different ways to load full data sets:

prepare_data_set(basedir = '/home/fagg/datasets/pfam', rotation = 0, nfolds = 5, ntrain_folds = 3)
    loads the raw CSV files, does the splitting and tokenization

OR

load_rotation(basedir = '/home/fagg/datasets/pfam', rotation=0)
    loads an already stored data set from a pickle file
'''
import tensorflow as tf
import pandas as pd
import numpy as np
import os
import fnmatch
import matplotlib.pyplot as plt
import random
import pickle

from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow import keras


def load_pfam_file(basedir, fold):
    '''
    Load a CSV file into a DataFrame

    :param basedir: Directory containing input files
    :param fold: Fold to load
    '''
    df = pd.read_csv('%s/pfamB_fold_%d.csv' % (basedir, fold))
    return df


def load_pfam_dataset(basedir='/home/fagg/datasets/pfam', rotation=0, nfolds=5, ntrain_folds=3):
    '''
    Load train/valid/test datasets into DataFrames

    :param basedir: Directory containing input files
    :param rotation: Rotation to load
    :param nfolds: Total number of folds
    :param ntrain_folds: Number of training folds to use
    :return: Dictionary containing the DataFrames
    '''
    # Rotate the fold indices: e.g., rotation=1, nfolds=5, ntrain_folds=3
    # gives train folds [1, 2, 3], valid fold [4], test fold [0]
    train_folds = (np.arange(ntrain_folds) + rotation) % nfolds
    valid_folds = (np.array([ntrain_folds]) + rotation) % nfolds
    test_folds = (np.array([ntrain_folds]) + 1 + rotation) % nfolds

    train_dfs = [load_pfam_file(basedir, f) for f in train_folds]
    valid_dfs = [load_pfam_file(basedir, f) for f in valid_folds]
    test_dfs = [load_pfam_file(basedir, f) for f in test_folds]

    train_df = pd.concat(train_dfs, ignore_index=True)
    valid_df = pd.concat(valid_dfs, ignore_index=True)
    test_df = pd.concat(test_dfs, ignore_index=True)

    return {'train': train_df, 'valid': valid_df, 'test': test_df}


def prepare_data_set(basedir='/home/fagg/datasets/pfam', rotation=0, nfolds=5, ntrain_folds=3):
    '''
    Generate a full data set

    :param basedir: Directory containing input files
    :param rotation: Rotation to load
    :param nfolds: Total number of folds
    :param ntrain_folds: Number of training folds to use
    :return: Dictionary containing a full train/validation/test data set

    Dictionary format:
    ins_train: tokenized training inputs (examples x len_max)
    outs_train: tokenized training outputs (examples x 1). Values are 0 ... n_tokens-1
    ins_valid: tokenized validation inputs (examples x len_max)
    outs_valid: tokenized validation outputs (examples x 1)
    ins_test: tokenized test inputs (examples x len_max)
    outs_test: tokenized test outputs (examples x 1)
    len_max: maximum length of a string
    n_tokens: Maximum number of output tokens
    out_index_word: dictionary containing index -> class name map (note index is 1 ... n_tokens)
    out_word_index: dictionary containing class name -> index map (note index is 1 ... n_tokens)
    '''
    # Load the data from the disk
    dat = load_pfam_dataset(basedir=basedir, rotation=rotation, nfolds=nfolds, ntrain_folds=ntrain_folds)

    # Extract ins/outs for each dataset
    dat_out = {}
    for k, df in dat.items():
        # Get the set of strings
        dat_out['ins_' + k] = df['string'].values
        dat_out['outs_' + k] = df['label'].values

    # Compute max length: only defined with respect to the training set
    len_max = np.max(np.array([len(s) for s in dat_out['ins_train']]))

    # TODO: Remove once testing complete
    test = pd.DataFrame(dat_out['outs_test'])

    print('tokenize fit...')
    # Convert strings to lists of indices
    tokenizer = keras.preprocessing.text.Tokenizer(char_level=True, filters='\t\n')
    tokenizer.fit_on_texts(dat_out['ins_train'])

    print('tokenize...')
    # Loop over all data sets
    for k in dat.keys():
        # Loop over all strings and tokenize
        seq = tokenizer.texts_to_sequences(dat_out['ins_' + k])
        dat_out['ins_' + k] = pad_sequences(seq, maxlen=len_max)  # Pad out so all are the same length

    n_tokens = np.max(dat_out['ins_train']) + 2

    print('outputs...')
    # Create tokenizer for the outputs
    tokenizer = keras.preprocessing.text.Tokenizer(filters='\t\n')
    tokenizer.fit_on_texts(dat_out['outs_train'])  # Essentially turns into label encoding

    # Tokenize all of the outputs (shift from 1-based to 0-based indices)
    for k in dat.keys():
        dat_out['outs_' + k] = np.array(tokenizer.texts_to_sequences(dat_out['outs_' + k])) - 1

    dat_out['len_max'] = len_max
    dat_out['n_tokens'] = n_tokens
    dat_out['out_index_word'] = tokenizer.index_word
    dat_out['out_word_index'] = tokenizer.word_index
    dat_out['rotation'] = rotation

    return dat_out


def save_data_sets(basedir='/home/fagg/datasets/pfam', out_basedir=None, nfolds=5, ntrain_folds=3):
    '''
    Generate pickle files for all rotations.

    :param basedir: Directory containing input files
    :param out_basedir: Directory for output files (None -> use the basedir)
    :param nfolds: Total number of folds
    :param ntrain_folds: Number of training folds to use
    :return: None (one pickle file is written per rotation)
    '''
    if out_basedir is None:
        out_basedir = basedir

    # Loop over all rotations
    for r in range(nfolds):
        # Build the rotation
        dat = prepare_data_set(basedir=basedir, rotation=r, nfolds=nfolds, ntrain_folds=ntrain_folds)

        # Write rotation to pickle file
        fname = '%s/pfamB_rotation_%d.pkl' % (out_basedir, r)
        with open(fname, 'wb') as fp:
            pickle.dump(dat, fp)


def load_rotation(basedir='/home/fagg/datasets/pfam', rotation=0):
    '''
    Load a single rotation from a pickle file. These rotations are 5 folds, 3 training folds

    :param basedir: Directory containing files
    :param rotation: Rotation to load
    :return: Dictionary containing a full train/validation/test data set
    '''
    fname = '%s/pfamB_rotation_%d.pkl' % (basedir, rotation)
    with open(fname, 'rb') as fp:
        dat_out = pickle.load(fp)
    return dat_out


def create_tf_datasets(dat, batch=8, prefetch=None):
    '''
    Translate the data structure from load_rotation() or prepare_data_set() into a proper TF Dataset object
    for each of training, validation and testing. These act as configurable generators that can be used by
    model.fit(), .predict() and .evaluate()

    :param dat: Data structure from load_rotation() or prepare_data_set()
    :param batch: Batch size (int)
    :param prefetch: Number of batches to prefetch (None = no prefetch)
    '''
    # Translate tensors into datasets
    dataset_train = tf.data.Dataset.from_tensor_slices((dat['ins_train'], dat['outs_train'])).batch(batch)
    dataset_valid = tf.data.Dataset.from_tensor_slices((dat['ins_valid'], dat['outs_valid'])).batch(batch)
    dataset_test = tf.data.Dataset.from_tensor_slices((dat['ins_test'], dat['outs_test'])).batch(batch)

    # Prefetch if specified
    if prefetch is not None:
        dataset_train = dataset_train.prefetch(prefetch)
        dataset_valid = dataset_valid.prefetch(prefetch)
        dataset_test = dataset_test.prefetch(prefetch)

    return dataset_train, dataset_valid, dataset_test
...
Referencer.py
Source:Referencer.py
1"""Common scripts, classes and functions2Copyright (C) 2021 Geoscan Ltd. https://www.geoscan.aero/3This program is free software: you can redistribute it and/or modify4it under the terms of the GNU General Public License as published by5the Free Software Foundation, either version 3 of the License, or6(at your option) any later version.7This program is distributed in the hope that it will be useful,8but WITHOUT ANY WARRANTY; without even the implied warranty of9MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the10GNU General Public License for more details.11You should have received a copy of the GNU General Public License12along with this program. If not, see <https://www.gnu.org/licenses/>.13"""14import os15import PhotoScan16try:17 from .sensor_tools import add_sensors_offset, remove_empty_sensors, LOCATION_REF_PATH18 from .flight_info_tools.ReferenceFile import open_unknown_reference_file, ReferenceXMLFile19except SystemError:20 from sensor_tools import add_sensors_offset, remove_empty_sensors, LOCATION_REF_PATH21 from flight_info_tools.ReferenceFile import open_unknown_reference_file, ReferenceXMLFile22class AbstractReferencer:23 """24 Abstract class for holding reference functionality25 """26 class _ReferenceProblems(set):27 """28 Represents set of unreferenced cameras29 """30 object_label = ''31 def report(self):32 """33 Creates report string34 :return: report str35 """36 count = len(self)37 if self:38 message = '{} are:\n'.format(self.object_label)39 message += '\n'.join(map(lambda x: x.label, sorted(self)[:5]))40 if count > 5:41 message += '\n...'42 message += '\nCount = {}'.format(count)43 else:44 message = 'There are no {}. Congrats!'.format(self.object_label.lower())45 return message46 class UnreferencedSensors(_ReferenceProblems):47 """48 Represents set of sensors without offset49 """50 object_label = 'Sensors without offset'51 class UnreferencedCameras(_ReferenceProblems):52 """53 Represents set of unreferenced cameras54 """55 object_label = 'Unreferenced cameras'56 def __init__(57 self, paths_to_reference, open_reference_file_func, offsets_ref_path, files_extensions, contains=None):58 self.offsets_ref_path = offsets_ref_path59 self._files = self._get_reference_files(paths_to_reference, files_extensions, contains)60 self._open_func = open_reference_file_func61 self._reference_loaded = False62 self._ref_dict = {}63 @staticmethod64 def _get_reference_files(paths_to_reference, files_extensions, contains):65 """66 Creates generator, which yields absolute path of file with same extension performed in self.extensions67 :return: Generator68 """69 def check_rule(p):70 """71 Checks extension of file. 
Case insensitive.72 :param p:73 :return:74 """75 name, ext = os.path.splitext(p.lower())76 correct_ext = ext in files_extensions77 if contains:78 return correct_ext and (contains in name)79 return correct_ext80 if contains:81 contains = contains.lower()82 if not isinstance(paths_to_reference, (list, set, tuple)):83 paths_to_reference = (paths_to_reference, )84 for main_path in paths_to_reference:85 for root, dirs, files in os.walk(main_path):86 for f in filter(check_rule, files):87 yield os.path.join(root, f)88 def _load_reference(self):89 """90 Loads reference91 """92 for path in self._files:93 print(path)94 reffile = self._open_func(path)95 ref_dict = {cam.name: cam for cam in reffile.cam_list}96 self._ref_dict.update(ref_dict)97 PhotoScan.app.update()98 def apply_offset(self, use_default_sensors: (None, dict) = None):99 """100 Split cameras by flight-camera (based on dir path). Applies offset and incline of sensor. Removes sensors101 without assigned cameras102 :param use_default_sensors.103 :return: UnreferencedSensors instance â unreferenced sensors104 """105 sensors_without_offset = add_sensors_offset(self.offsets_ref_path, use_default_sensors=use_default_sensors)106 remove_empty_sensors()107 return self.UnreferencedSensors(sensors_without_offset)108 @staticmethod109 def __apply4camera(ps_cam, ref_cam, load_rotation, load_accuracy):110 """111 Applies reference to camera from matched CameraRef instance112 :param ps_cam: PhotoScan.Camera113 :param ref_cam: CameraRef instance114 :param load_rotation: bool115 :param load_accuracy: bool116 :return: bool -- success of implementation117 """118 if ref_cam.has_location:119 ps_cam.reference.location = PhotoScan.Vector((ref_cam.x, ref_cam.y, ref_cam.alt))120 else:121 return False122 if load_rotation and ref_cam.has_rotation:123 ps_cam.reference.rotation = PhotoScan.Vector((ref_cam.yaw, ref_cam.pitch, ref_cam.roll))124 ps_cam.reference.rotation_accuracy = PhotoScan.Vector([10]*3)125 if load_accuracy and ref_cam.sd_alt:126 ps_cam.reference.location_accuracy = PhotoScan.Vector((ref_cam.sd_x, ref_cam.sd_y, ref_cam.sd_alt))127 return True128 def apply(self, load_rotation=True, load_accuracy=True):129 """130 Applies reference to cameras from matched CameraRef instances131 :param load_rotation: bool132 :param load_accuracy: bool133 :return: self.UnreferencedCameras instance â unreferenced cameras134 """135 if not self._reference_loaded:136 self._load_reference()137 unreferenced = self.UnreferencedCameras()138 for ps_cam in PhotoScan.app.document.chunk.cameras:139 ref_cam = self._ref_dict.get(ps_cam.label)140 if ref_cam is not None:141 success = self.__apply4camera(ps_cam, ref_cam, load_rotation, load_accuracy)142 else:143 success = False144 if not success:145 unreferenced.add(ps_cam)146 return unreferenced147class Referencer(AbstractReferencer):148 """149 CSV (txt) referencer150 """151 def __init__(self, paths_to_reference, offsets_ref_path=LOCATION_REF_PATH):152 super(Referencer, self).__init__(153 paths_to_reference=paths_to_reference,154 open_reference_file_func=open_unknown_reference_file,155 offsets_ref_path=offsets_ref_path,156 files_extensions={'.txt', '.csv'}157 )158class XMLReferencer(AbstractReferencer):159 """160 Agisoft XML referencer161 """162 def __init__(self, paths_to_reference, offsets_ref_path=LOCATION_REF_PATH):163 super(XMLReferencer, self).__init__(164 paths_to_reference=paths_to_reference,165 open_reference_file_func=ReferenceXMLFile.from_file,166 offsets_ref_path=offsets_ref_path,167 files_extensions={'.xml'},168 
contains='GNSS'169 )170if __name__ == "__main__":171 import sys172 r = Referencer(sys.argv[1])173 # r = XMLReferencer(sys.argv[1])174 print(r.apply())...
gen_algo_supervisor.py
Source:gen_algo_supervisor.py
from controller import Node, Supervisor, Keyboard, Emitter, Receiver
from population import *

superv = Supervisor()
timestep = int(superv.getBasicTimeStep())
superv.step(timestep)

sbr = superv.getFromDef("SBR")
load = superv.getFromDef("LOAD")

# The emitter to send genotype to SBR
emitter = superv.getDevice("emitter")
emitter.setChannel(1)

receiver = superv.getDevice("receiver")
receiver.enable(timestep)
receiver.setChannel(2)
# Establish sync between emitter and receiver

POPULATION_SIZE = 10
GENOTYPE_SIZE = 4
NUM_GENERATIONS = 15

bounds = [(3, 17), (-3, 2), (100, 180), (560, 640)]
# !!!!! not sure


def run_seconds(t, reset_position=False):
    n = 1000*t/timestep
    start = superv.getTime()
    while superv.step(timestep) != -1:
        if superv.getTime()-start > t:
            break
        if reset_position:
            # Hold the robot and load in place while the simulation runs
            restore_robot_position()
            sbr.resetPhysics()
            load.resetPhysics()


def getPerformanceData():
    global init_translation, init_rotation, load_init_translation, load_init_rotation
    emitter.send("return_fitness".encode('utf-8'))
    while superv.step(timestep) != -1:
        if receiver.getQueueLength() > 0:
            message = receiver.getData().decode('utf-8')
            receiver.nextPacket()
            angle_fitness = float(message)

            load_translation = load.getField("translation").getSFVec3f()
            load_rotation = load.getField("rotation").getSFRotation()
            load_t_cost = sum([(i1-i2)**2 for i1, i2 in zip(load_translation, load_init_translation)])
            load_r_cost = sum([(i1-i2)**2 for i1, i2 in zip(load_rotation, load_init_rotation)])

            sbr_translation = sbr.getField("translation").getSFVec3f()
            sbr_rotation = sbr.getField("rotation").getSFRotation()
            sbr_t_cost = sum([(i1-i2)**2 for i1, i2 in zip(sbr_translation, init_translation)])
            sbr_r_cost = sum([(i1-i2)**2 for i1, i2 in zip(sbr_rotation, init_rotation)])
            #print("Angle Fitness - ", angle_fitness)
            #print("Load Fitness - ", (load_r_cost+load_t_cost))
            #print("Robot T Fitness ", (sbr_r_cost+sbr_t_cost))
            return angle_fitness + ((load_r_cost+load_t_cost)*100 + (sbr_r_cost+sbr_t_cost))*30


def send_genotype(genotype):
    genotype_string = [str(g) for g in genotype]
    genotype_string = ','.join(genotype_string)
    emitter.send(genotype_string.encode('utf-8'))


def restore_robot_position():
    global init_translation, init_rotation
    sbr_translation.setSFVec3f(init_translation)
    sbr_rotation.setSFRotation(init_rotation)
    load_translation.setSFVec3f(load_init_translation)
    load_rotation.setSFRotation(load_init_rotation)


def evaluate_genotype(genotype):
    #test_genotype = [6.70891752445785, -2.984975676757869, 148.50048150101875, 655.0303108723926]
    # Send genotype to robot
    send_genotype(genotype)

    # Run for some time
    run_seconds(90)

    # Store fitness
    fitness = getPerformanceData()
    #print("Supervisor: Fitness of ", genotype, " - %f "%(fitness))

    # Reset physics and restore the starting pose
    sbr.resetPhysics()
    restore_robot_position()

    run_seconds(5, True)

    sbr.resetPhysics()
    restore_robot_position()

    return fitness


def run_optimization():
    global population

    print("---\n")
    print("Starting Optimization")
    print("Population Size %i , Genome Size %i" % (POPULATION_SIZE, GENOTYPE_SIZE))

    for gen in range(NUM_GENERATIONS):
        population_fitness = []
        for ind in range(POPULATION_SIZE):
            print("Generation %i , Genotype %i " % (gen, ind))
            # Get the genotype from the population and evaluate it
            genotype = population[ind]
            fitness = abs(evaluate_genotype(genotype))
            population_fitness.append(fitness)

        best_fitness, best_fitness_val = population_get_fittest(population, population_fitness)
        average_fitness = population_get_average_fitness(population_fitness)
        print("Best Fitness ", best_fitness)
        print("Best Fitness Value - %f" % best_fitness_val)
        print("Average Fitness - %f" % average_fitness)

        if gen < NUM_GENERATIONS-1:
            population = population_reproduce(population, population_fitness)

    return best_fitness


def main():
    # Initiate keyboard
    global init_translation, init_rotation, population, sbr_translation, sbr_rotation, load_init_translation, load_init_rotation, load_translation, load_rotation

    keyb = Keyboard()
    keyb.enable(timestep)
    count = 0

    sbr_translation = sbr.getField("translation")
    sbr_rotation = sbr.getField("rotation")
    init_translation = sbr_translation.getSFVec3f()
    init_rotation = sbr_rotation.getSFRotation()

    load_translation = load.getField("translation")
    load_rotation = load.getField("rotation")
    load_init_translation = load_translation.getSFVec3f()
    load_init_rotation = load_rotation.getSFRotation()

    population = population_create(POPULATION_SIZE, GENOTYPE_SIZE, bounds)
    fittest = run_optimization()

    send_genotype(fittest)
    # Restore the robot's position
    restore_robot_position()

    while superv.step(timestep) != -1:
        key = keyb.getKey()
        if key == ord('Q'):
            quit()


main()
...