Best Python code snippet using pyscreenshot_python
data.py
Source:data.py
import os
import math
import random

import numpy as np
from abc import ABCMeta, abstractmethod

from road_detect_project.augmenter.aerial import Creator
from road_detect_project.model.params import dataset_params


class AbstractDataset(metaclass=ABCMeta):
    """
    Base class every data loader should inherit from.

    Implements chunking of the dataset: subsets of examples are loaded
    (e.g. onto the GPU) iteratively during an epoch, and
    ``switch_active_training_set`` swaps the chunk currently in use.
    """

    def __init__(self):
        # Keys mirror the on-disk set names used by the concrete loaders.
        self.data_set = {"test_PyQt5": None,
                         "train": None,
                         "valid": None}
        self.all_training = []     # list of [data, labels] training chunks
        self.active = []           # chunk currently resident / in use
        self.all_shared_hook = []  # When casted, cannot set_value on them
        self.nr_examples = {}      # per-set example counts, see set_nr_examples

    @abstractmethod
    def load(self, dataset_path):
        """Loading and transforming logic for dataset."""
        return

    def destroy(self):
        """Hook for subclasses that need to release resources."""
        pass

    def get_chunk_number(self):
        """Return the number of training chunks."""
        return len(self.all_training)

    def get_elements(self, idx):
        """Return the number of examples in training chunk ``idx``."""
        return len(self.all_training[idx][0])

    def get_total_number_of_batches(self, batch_size):
        """Return total minibatches per epoch over all training chunks."""
        s = sum(len(c[0]) for c in self.all_training)
        return math.ceil(s / batch_size)

    def _chunkify(self, dataset, nr_of_chunks, batch_size):
        """Split ``dataset`` ([data, labels]) into float32 chunks.

        Items per chunk are rounded down to an exact multiple of
        ``batch_size`` so each chunk holds only whole minibatches.

        Raises:
            Exception: if a chunk would be smaller than one minibatch.
        """
        items_per_chunk = len(dataset[0]) / nr_of_chunks
        if items_per_chunk < batch_size:
            print("Chunk limit too small,or batch size too large.\n"
                  "Each chunk must include at least one batch.")
            raise Exception("Fix chunk_size and batch size.")
        # Round down to a whole number of minibatches per chunk.
        items_per_chunk = batch_size * int(items_per_chunk / batch_size)
        data, labels = dataset
        chunks = [[AbstractDataset._float32(data[x:x + items_per_chunk]),
                   AbstractDataset._float32(labels[x:x + items_per_chunk])]
                  for x in range(0, len(dataset[0]), items_per_chunk)]
        # NOTE(review): the original comment said "less than batch size",
        # but the code drops the tail chunk when it holds fewer than 15
        # minibatches — kept as written; confirm the intended threshold.
        last_chunk_size = len(chunks[-1][0])
        if last_chunk_size < batch_size * 15:
            chunks.pop(-1)
            print("------ Remove last chunk."
                  " {} elements not enough for at least one minibatch of {}".format(
                      last_chunk_size, batch_size))
        return chunks

    def set_nr_examples(self, train, valid, test):
        """Record the example count of each set for later reporting."""
        self.nr_examples["train"] = train[0].shape[0]
        self.nr_examples["valid"] = valid[0].shape[0]
        self.nr_examples["test_PyQt5"] = test[0].shape[0]

    def get_report(self):
        """Return the per-set example counts recorded by set_nr_examples."""
        return self.nr_examples

    def switch_active_training_set(self, idx):
        """
        Swap the currently active training chunk to chunk ``idx``.

        Each epoch a large number of examples will be seen by the model,
        and often all examples will not fit in device memory at the same
        time, so this is called nr_of_chunks times per epoch.
        """
        new_chunk_x, new_chunk_y = AbstractDataset._list_to_arr(self.all_training[idx])
        self.active[0] = new_chunk_x
        self.active[1] = new_chunk_y

    def shared_dataset(self, data_xy, cast_to_int=True):
        """Return ``(x, y)`` and register ``y`` on the shared hook list.

        ``cast_to_int`` is kept for interface compatibility; in the
        original (Theano-era) code it cast labels to int, but both
        branches were identical here, so it currently has no effect.
        """
        data_x, data_y = data_xy
        self.all_shared_hook.append(data_y)
        return data_x, data_y

    @staticmethod
    def _float32(d):
        """Return ``d`` as a float32 ndarray."""
        return np.asarray(d, dtype="float32")

    @staticmethod
    def _list_to_arr(d):
        """Convert a [data, labels] pair to float32 ndarrays."""
        d_x, d_y = d
        d_x = np.asarray(d_x, dtype="float32")
        d_y = np.asarray(d_y, dtype="float32")
        return d_x, d_y

    @staticmethod
    def _get_file_path(dataset):
        """Validate that ``dataset`` is an existing file and return it."""
        data_dir, data_file = os.path.split(dataset)
        # TODO: Add some robustness, like checking if file is folder and correct that
        assert os.path.isfile(dataset)
        return dataset

    @staticmethod
    def dataset_check(name, dataset, batch_size):
        """Raise if ``dataset`` has too few examples for one minibatch."""
        if len(dataset[0]) < batch_size:
            print("Insufficent examples in {}. {} examples not enough "
                  "for at least one minibatch".format(name, len(dataset[0])))
            raise Exception("Decrease batch_size or increase samples_per_image")

    @staticmethod
    def dataset_sizes(train, valid, test, chunks):
        """Print set sizes in MB and return the minimum chunk count."""
        mb = 1000000.0
        train_size = sum(data.nbytes for data in train) / mb
        valid_size = sum(data.nbytes for data in valid) / mb
        test_size = sum(data.nbytes for data in test) / mb
        nr_of_chunks = math.ceil(train_size / chunks)
        print('---- Minimum number of training chunks: {}'.format(nr_of_chunks))
        print('---- Dataset at least:')
        print('---- Training: \t {}mb'.format(train_size))
        print('---- Validation: {}mb'.format(valid_size))
        print('---- Testing: \t {}mb'.format(test_size))
        return nr_of_chunks

    @staticmethod
    def dataset_shared_stats(image_shape, label_shape, chunks):
        """Print shapes and the maximum chunk size before sharing data."""
        print('')
        print('Preparing shared variables for datasets')
        print('---- Image data shape: {}, label data shape: {}'.format(image_shape, label_shape))
        print('---- Max chunk size of {}mb'.format(chunks))

    @staticmethod
    def dataset_chunk_stats(nr_training_chunks, elements_pr_chunk, elements_last_chunk):
        """Print the actual chunk layout of the training set."""
        print('---- Actual number of training chunks: {}'.format(nr_training_chunks))
        print('---- Elements per chunk: {}'.format(elements_pr_chunk))
        print('---- Last chunk size: {}'.format(elements_last_chunk))


class AerialDataset(AbstractDataset):
    """Dataset generated on the fly from aerial imagery via ``Creator``."""

    def load(self, dataset_path, params=None, batch_size=16):
        """Create train/valid/test sets, chunk the training data.

        Returns True on success; raises on insufficient examples.
        """
        print("Creating aerial image dataset")
        self.std = params.dataset_std
        chunks = params.chunk_size
        # TODO: ensure that the dataset is as expected.
        creator = Creator(dataset_path,
                          dim=(params.input_dim, params.output_dim),
                          rotation=params.use_rotation,
                          preprocessing=params.use_preprocessing,
                          std=self.std,
                          only_mixed=params.only_mixed_labels,
                          reduce_testing=params.reduce_testing,
                          reduce_training=params.reduce_training,
                          reduce_validation=params.reduce_validation)
        train, valid, test = creator.dynamically_create(
            params.samples_per_image,
            enable_label_noise=params.use_label_noise,
            label_noise=params.label_noise,
            only_mixed=params.only_mixed_labels)

        # Testing dataset size requirements
        AerialDataset.dataset_check("train", train, batch_size)
        AerialDataset.dataset_check("valid", valid, batch_size)
        AerialDataset.dataset_check("test_PyQt5", test, batch_size)

        AbstractDataset.dataset_shared_stats(train[0].shape, train[1].shape, chunks)
        self.set_nr_examples(train, valid, test)
        nr_of_chunks = AbstractDataset.dataset_sizes(train, valid, test, chunks)
        training_chunks = self._chunkify(train, nr_of_chunks, batch_size)
        # Fixed: the original printed these chunk stats twice in a row.
        AbstractDataset.dataset_chunk_stats(len(training_chunks),
                                            len(training_chunks[0][0]),
                                            len(training_chunks[-1][0]))
        self.active = list(self.shared_dataset(training_chunks[0], cast_to_int=False))
        self.data_set['train'] = self.active
        self.data_set['valid'] = self.shared_dataset(valid, cast_to_int=True)
        self.data_set['test_PyQt5'] = self.shared_dataset(test, cast_to_int=True)
        # Not stored on the GPU, unlike the shared variables defined above.
        self.all_training = training_chunks
        return True

    def gen_data(self, data_name, epoch=1000, batch_size=16):
        """Yield [x, y] minibatches from ``data_name`` for ``epoch`` passes.

        For "train" this walks every chunk per epoch, switching the
        active chunk as it goes; other sets are batched directly.
        """
        chunks = self.get_chunk_number()
        for i in range(epoch):
            if data_name == "train":
                for chunk in range(chunks):
                    self.switch_active_training_set(chunk)
                    nr_elements = self.get_elements(chunk)
                    train_data = self.data_set[data_name]
                    batches = [[train_data[0][x:x + batch_size],
                                train_data[1][x:x + batch_size]]
                               for x in range(0, nr_elements, batch_size)]
                    for batch in batches:
                        yield batch
            else:
                data = self.data_set[data_name]
                nr_elements = data[0].shape[0]
                batches = [[data[0][x:x + batch_size], data[1][x:x + batch_size]]
                           for x in range(0, nr_elements, batch_size)]
                for batch in batches:
                    yield batch


class AerialCurriculumDataset(AbstractDataset):
    """
    Data loader for pre-generated datasets,
    i.e. curriculum learning and datasets too big to fit in main memory.
    Includes a method for stage switching and mixing;
    it switches the training set and controls the behavior of the switch.
    """

    def load_set(self, path, set, stage=None):
        """Load (data, labels) arrays for ``set`` (optionally a stage)."""
        if stage is not None:
            base_path = os.path.join(path, set, stage)
        else:
            base_path = os.path.join(path, set)
        data = np.load(os.path.join(base_path, "data", "examples.npy"))
        labels = np.load(os.path.join(base_path, "labels", "examples.npy"))
        return data, labels

    def mix_in_next_stage(self):
        """Mix the next curriculum stage's examples into the chunks.

        Without replacement, examples overwrite chunk slots in a
        shuffled order until the stage is exhausted; with replacement,
        random chunk slots are overwritten.
        """
        self.stage += 1
        if self.nr_of_stages <= self.stage:
            print("No more stage available")
            return

        current_stage = "stage{}".format(self.stage)
        labels = np.load(os.path.join(self.stage_path, current_stage, "labels", "examples.npy"))
        data = np.load(os.path.join(self.stage_path, current_stage, "data", "examples.npy"))
        print("------ Mixing in {} with {} examples".format(current_stage, data.shape[0]))

        if not dataset_params.with_replacement:
            elements = data.shape[0]
            shuffle_count = 0
            shuffle_index = list(range(elements))
            random.shuffle(shuffle_index)
            for c in range(len(self.all_training)):
                nr_chunk_examples = self.all_training[c][0].shape[0]
                for x in range(nr_chunk_examples):
                    if shuffle_count < elements:
                        i = shuffle_index.pop()
                        self.all_training[c][0][x] = data[i]
                        self.all_training[c][1][x] = labels[i]
                    else:
                        break
                    shuffle_count += 1
        else:
            nr_chunks = len(self.all_training)
            for i in range(data.shape[0]):
                c = random.randint(0, nr_chunks - 1)
                nr_chunk_examples = self.all_training[c][0].shape[0]
                x = random.randint(0, nr_chunk_examples - 1)
                self.all_training[c][0][x] = data[i]
                # Fixed: original wrote labels into slot [0] (the data
                # array), clobbering the example and never updating labels.
                self.all_training[c][1][x] = labels[i]

    def load(self, dataset_path, params=None, batch_size=16):
        """Load the pre-generated stage-0 dataset and chunk training data."""
        print("------- Loading aerial curriculum dataset")
        chunks = params.chunk_size
        self.std = params.dataset_std
        # For later stage loading
        self.stage = 0
        self.stage_path = os.path.join(dataset_path, "train")
        self.nr_of_stages = len(os.listdir(self.stage_path))

        train = self.load_set(dataset_path, "train", stage="stage{}".format(self.stage))
        valid = self.load_set(dataset_path, "valid")
        test = self.load_set(dataset_path, "test_PyQt5")

        # Testing dataset size requirements
        AerialCurriculumDataset.dataset_check("train", train, batch_size)
        AerialCurriculumDataset.dataset_check("valid", valid, batch_size)
        AerialCurriculumDataset.dataset_check("test_PyQt5", test, batch_size)

        AerialCurriculumDataset.dataset_shared_stats(train[0].shape, train[1].shape, chunks)
        self.set_nr_examples(train, valid, test)
        nr_of_chunks = AerialCurriculumDataset.dataset_sizes(train, valid, test, chunks)
        training_chunks = self._chunkify(train, nr_of_chunks, batch_size)
        AerialCurriculumDataset.dataset_chunk_stats(len(training_chunks),
                                                    len(training_chunks[0][0]),
                                                    len(training_chunks[-1][0]))
        self.active = list(self.shared_dataset(training_chunks[0], cast_to_int=False))
        self.data_set["train"] = self.active
        self.data_set["valid"] = self.shared_dataset(valid, cast_to_int=True)
        self.data_set["test_PyQt5"] = self.shared_dataset(test, cast_to_int=True)
        # Not stored on the GPU, unlike the shared variables defined above.
        self.all_training = training_chunks
        return True


if __name__ == '__main__':
    # Smoke-test the curriculum dataset loader.
    dataset_2 = AerialCurriculumDataset()
    path = "../tools/my_data/"
    dataset_2.load(path, params=dataset_params)
    dataset_2.switch_active_training_set(0)
    print("train\n")
    train_data = dataset_2.data_set["train"]
    print(train_data[0].shape)
    print(train_data[1].shape)
    print("valid\n")
    valid_data = dataset_2.data_set["valid"]
    # Fixed: the original printed train_data shapes under the valid
    # and test headings.
    print(valid_data[0].shape)
    print(valid_data[1].shape)
    print("test_PyQt5\n")
    test_data = dataset_2.data_set["test_PyQt5"]
    print(test_data[0].shape)
main.py
Source:main.py
from sources.PyGS import *
import sys


def test_pyqt5(style):
    """Launch the PyGS window, optionally with the dark palette applied."""
    app = QApplication(sys.argv)
    # Force the style to be the same on all OSs:
    app.setStyle('Windows')

    # Dark palette: (role, color) for the normal group, then
    # (group, role, color) overrides for the Disabled group.
    dark = QColor(56, 56, 56)
    normal_roles = [
        (QPalette.Window, dark),
        (QPalette.WindowText, Qt.white),
        (QPalette.Base, dark),
        (QPalette.AlternateBase, QColor(63, 63, 63)),
        (QPalette.ToolTipBase, Qt.white),
        (QPalette.ToolTipText, Qt.white),
        (QPalette.Text, Qt.white),
        (QPalette.Button, dark),
        (QPalette.ButtonText, Qt.white),
        (QPalette.BrightText, QColor(0, 128, 152)),
        (QPalette.Link, QColor(42, 130, 218)),
        (QPalette.Highlight, QColor(0, 128, 152)),
        (QPalette.HighlightedText, Qt.white),
    ]
    disabled_roles = [
        (QPalette.Window, QColor(51, 51, 51)),
        (QPalette.ButtonText, QColor(111, 111, 111)),
        (QPalette.Text, QColor(122, 118, 113)),
        (QPalette.WindowText, QColor(122, 118, 113)),
        (QPalette.Base, QColor(32, 32, 32)),
    ]

    palette = QPalette()
    for role, color in normal_roles:
        palette.setColor(role, color)
    for role, color in disabled_roles:
        palette.setColor(QPalette.Disabled, role, color)

    if style:
        app.setPalette(palette)
    path = os.path.dirname(os.path.realpath(__file__))
    ex = PyGS(path)  # keep a reference so the window isn't collected
    sys.exit(app.exec_())
test_pyqt5.py
Source:test_pyqt5.py
1'''2*********************************************************************************************3 *File: test_pyqt5.py4 *Project: servodogVersion2.05 *Filepath: /home/guoyucan/ServoDogVersion1.0/servodogVersion2.0/test_pyqt5.py 6 *File Created: Sunday, 28th November 2021 8:24:59 pm7 *Author: Guo Yucan, 12032421@mail.sustech.edu.cn 8 *Last Modified: Sunday, 28th November 2021 8:25:02 pm9 *Modified By: Guo Yucan, 12032421@mail.sustech.edu.cn 10 *Copyright @ 2021 , BionicDL LAB, SUSTECH, Shenzhen, China 11*********************************************************************************************12'''13import sys 14from PyQt5.QtWidgets import (QWidget, QToolTip, QPushButton, QApplication)15from PyQt5.QtGui import QFont 16class Example(QWidget):17 def __init__(self):18 super().__init__()19 self.initUI()20 21 def initUI(self):...
Learn to execute automation testing from scratch with the LambdaTest Learning Hub, covering everything from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.
You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.
Get 100 minutes of automation testing FREE!