Best Python code snippet using autotest_python
update.py
Source: update.py
1import copy2import datetime3from collections import defaultdict4from pathlib import Path5from warnings import warn6class Updater():7 def __init__(self):8 tag_list = 'mp ls ne za_pred za_ante dp_label dp_head sr_pred sr_args cr'.split(' ')9 self.col_idx = {v:i+3 for i,v in zip(range(len(tag_list)), tag_list)}10 def config(self, tsv_path, comment, log):11 self.tsv_path = Path(tsv_path)12 self.comment_file, self.comment_enc = Path(comment.name), comment.encoding13 self.log_file, self.log_enc = Path(log.name), log.encoding14 comment.close(); log.close()15 self.log_file.unlink()16 self.comment_file.unlink()17 def _rec_ddict(self):18 return defaultdict(self._rec_ddict)19 def load_prepatch(self, ppatch_file):20 self.patch = []21 self.comment_list = []22 # define pre_patch23 self.prepatch = self._rec_ddict()24 lines = [line.strip('\n') for line in ppatch_file]25 ppatch_file.close()26 for line in lines:27 one_ppatch = line.strip('\n').split('\t')28 if not len(one_ppatch) == 4:29 raise Exception(f"Line not composed of 4 columns, current line: {line}, number of columns : {len(one_ppatch)}")30 #check if comment31 if not one_ppatch[-1] == '':32 self.comment_list.append('\t'.join(one_ppatch))33 doc_id = '-'.join(one_ppatch[0].split('-')[:2])34 self.prepatch[doc_id][one_ppatch[0]][one_ppatch[1]] = one_ppatch[2]35 def make_patch(self):36 self._patch_dict = self._rec_ddict()37 for doc_id, gwid_items in self.prepatch.items():38 tsv_file = self.tsv_path/f'{doc_id}.unified.min.tsv'39 #TODO: need refactoring, repeated structure.... 
1) tsv load and check 2) writable check40 if tsv_file.exists():41 with tsv_file.open(encoding = 'utf8') as f: lines = f.readlines()42 #to check if writable43 copied_tsv = copy.copy(lines)44 for line_idx, line in enumerate(lines):45 tsv_line= line.strip('\n').split('\t')46 # if matched line exists in prepatch47 if tsv_line[0] in self.prepatch[doc_id].keys():48 self._cp_patchline = copy.copy(self.prepatch[doc_id][tsv_line[0]])49 for field, after in self.prepatch[doc_id][tsv_line[0]].items():50 after = after.strip('\n')51 # CAUTION, shallow copys, be sure not to make change in nested object52 del self._cp_patchline[field]53 # fix morpheme unit in case [mp, ls, en]54 if field.split('.')[0] in ['mp', 'ls', 'ne'] and '.' in field:55 field_name, sub_field = field.split('.')56 field_idx = self.col_idx[field_name]57 before = tsv_line[field_idx].split(' + ')[int(sub_field)-1]58 if not before == after:59 # --- from write60 sub_fields = tsv_line[field_idx].split(' + ')61 field_name, sub_idx = field.split('.')62 sub_fields[int(sub_idx)-1] = after63 tsv_line[field_idx] = ' + '.join(sub_fields)64 lines[line_idx] = '\t'.join(tsv_line).strip('\n') + '\n'65 # ---66 self._patch_dict[doc_id][tsv_line[0]][field] = after67 self.patch.append([tsv_line[0], field, before, after, ''])68 else:69 field_idx = self.col_idx[field]70 before = tsv_line[field_idx].strip('\n')71 if not tsv_line[field_idx] == after:72 73 # --- from write74 field_idx = self.col_idx[field]75 tsv_line[field_idx] = after76 lines[line_idx] = '\t'.join(tsv_line).strip('\n') + '\n'77 # --- 78 self._patch_dict[doc_id][tsv_line[0]][field] = after79 self.patch.append([tsv_line[0], field, before, after, ''])80 # error Not all prepatchs reviewed line field has not existing gwid81 if self._cp_patchline:82 raise Exception(f"Some prepatch lines not checked, gwid: {self._cp_patchline.keys()}, field: {tsv_line[0]}")83 self._cp_patchline = {}84 85 if not len(lines) == len(copied_tsv):86 raise Exception(f"{tsv_file.name} tsv patch 
before/after line number not matched. before: {len(copied_tsv)}, after: {len(lines)}")87 else:88 raise Exception(f"corresponding tsv file doesn't exist, given prepatch document id: {doc_id}")89 self.patch.sort()90 def write(self):91 self.datenow = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")92 if self._patch_dict:93 for doc_id, gwid_items in self._patch_dict.items():94 tsv_file = self.tsv_path/f'{doc_id}.unified.min.tsv'95 with tsv_file.open(encoding = 'utf8') as f: lines = f.readlines()96 copied_original = copy.copy(lines)97 for line_idx, line in enumerate(lines):98 tsv_line= line.split('\t')99 if tsv_line[0] in self._patch_dict[doc_id].keys():100 for field, after in self._patch_dict[doc_id][tsv_line[0]].items(): 101 if field.split('.')[0] in ['mp', 'ls', 'ne'] and '.' in field:102 field_name, sub_idx = field.split('.')103 field_idx = self.col_idx[field_name]104 sub_fields = tsv_line[field_idx].split(' + ')105 sub_fields[int(sub_idx)-1] = after106 tsv_line[field_idx] = ' + '.join(sub_fields)107 lines[line_idx] = '\t'.join(tsv_line).strip('\n') + '\n'108 109 else:110 field_idx = self.col_idx[field]111 tsv_line[field_idx] = after112 lines[line_idx] = '\t'.join(tsv_line).strip('\n') + '\n'113 if not copied_original == lines:114 with tsv_file.open('w') as f: print(''.join(lines).strip('\n'), file=f)115 if self.comment_list:116 with self.comment_file.open('w', encoding = self.comment_enc) as f:117 for log in self.comment_list:118 line = log.split('\t')[:2] + [self.datenow] + [log.split('\t')[-1]]119 print('\t'.join(line).strip('\n'), file = f)120 if self.patch:121 with self.log_file.open('w', encoding = self.log_enc) as f:122 for log in self.patch:123 line = log[:2] + [self.datenow] + log[2:4]...
conftest.py
Source: conftest.py
# pylama:ignore=C901
@pytest.fixture
def patch_dict():
    """Fixture returning a recursive dict/list patcher.

    Update keys prefixed with "-" delete the matching entry; keys prefixed
    with "!" replace it outright instead of merging recursively.  Dicts merge
    into dicts, lists merge into lists (a list update is treated as an
    index -> value mapping), anything else is overwritten in place.
    """

    def _merge_mapping(target, changes):
        # Apply every change key to *target* in place and return it.
        for key, value in changes.items():
            is_delete = str(key).startswith("-")
            is_replace = str(key).startswith("!")
            if is_delete or is_replace:
                key = key[1:]
                if is_delete:
                    del target[key]
                    continue
            if key not in target or is_replace:
                target[key] = value
            elif isinstance(target[key], dict) and isinstance(value, dict):
                _merge_mapping(target[key], value)
            elif isinstance(target[key], list) and isinstance(value, (list, dict)):
                _merge_sequence(target[key], value)
            else:
                target[key] = value
        return target

    def _merge_sequence(target, changes):
        # A plain list of updates is shorthand for {index: value, ...}.
        if isinstance(changes, list):
            changes = dict(enumerate(changes))
        for key, value in changes.items():
            is_delete = str(key).startswith("-")
            is_replace = str(key).startswith("!")
            if is_delete or is_replace:
                key = int(key[1:])
                if is_delete:
                    del target[key]
                    continue
            if key >= len(target) or is_replace:
                # Grow the list so the target index exists, then assign.
                target += [None] * (key - len(target) + 1)
                target[key] = value
            elif isinstance(target[key], list) and isinstance(value, (list, dict)):
                _merge_sequence(target[key], value)
            elif isinstance(target[key], dict) and isinstance(value, dict):
                _merge_mapping(target[key], value)
            else:
                target[key] = value
        return target

    return _merge_mapping
test_acid_diff.py
Source: test_acid_diff.py
from .test_acid import gen_random_grid, gen_random_scalar, gen_random_str

# Percent probabilities (and generation count) steering the random diffing.
GENERATION_NUMBER, PERCENT_PATCH, PERCENT_MOVE_COL, PERCENT_ADD_VAL, PERCENT_DUPLICATE = (10, 30, 5, 10, 5)


class RefuseRemove(BaseException):
    """Raised when a dict already carrying the REMOVE marker is patched."""
    pass


def _patch_dict(a_dict, cols=None):
    """Return a shallow copy of *a_dict* with a few values randomly rewritten.

    Args:
        a_dict: mapping to mutate; the original object is left untouched.
        cols: optional column mapping whose keys may gain a new random value.

    Raises:
        RefuseRemove: if any value of *a_dict* is the REMOVE marker.
    """
    patched = a_dict.copy()
    # Refuse to patch a dict that already contains the REMOVE flag.
    for current in patched.values():
        if current is REMOVE:
            raise RefuseRemove()
    candidates = list(patched.keys())
    upper = int(len(patched) * (PERCENT_PATCH / 100))
    if candidates:
        for _ in range(0, random.randint(0, upper) + 1):
            pick = random.randint(0, len(candidates) - 1)
            key = candidates[pick]
            if key != 'id':
                # Re-roll until the generated scalar is not the REMOVE marker.
                while True:
                    patched[key] = gen_random_scalar()
                    if patched[key] is not REMOVE:
                        break
    # Occasionally add a fresh value for one of the known columns.
    if cols and random.randint(0, 100) < PERCENT_ADD_VAL:
        key = list(cols.keys())[random.randint(0, len(cols) - 1)]
        if key != 'id':
            patched[key] = gen_random_str()
    return patched


def gen_diff_metadata(metadata):
    """Return a randomly patched variant of *metadata*."""
    return _patch_dict(metadata)


def gen_diff_meta_cols(cols):
    """Patch every column's metadata and, occasionally, move one column.

    Args:
        cols: ordered column mapping (supports at/pop_at/add_item).
    """
    cols = cols.copy()
    for name in cols:
        cols[name] = gen_diff_metadata(cols[name])
    # Move one column to a random position with PERCENT_MOVE_COL probability.
    if random.randint(0, 100) < PERCENT_MOVE_COL:
        src = random.randint(0, len(cols) - 1)
        dst = random.randint(0, len(cols) - 1)
        key = cols.at(src)
        moved = cols.pop_at(src)
        cols.add_item(key, moved, index=dst)
    return cols


def gen_new_row(grid):
    """Yield each row of *grid* patched twice (column-aware, then blind).

    Args:
        grid: iterable of row dicts exposing a ``column`` attribute.
    """
    for original in grid:
        once = _patch_dict(original, grid.column)
        yield _patch_dict(once)


def gen_diff(orig):
    """Build a new Grid that randomly diverges from *orig*.

    Args:
        orig: source Grid whose metadata, columns and rows get patched.
    """
    patched_metadata = gen_diff_metadata(orig.metadata)
    patched_cols = gen_diff_meta_cols(orig.column)
    grid = Grid(orig.version, metadata=patched_metadata, columns=patched_cols)
    for row in gen_new_row(orig):
        grid.append(row)
        # Sometimes append a duplicate of a row that carries no id.
        if "id" not in row and random.randint(0, 100) < PERCENT_DUPLICATE:
            grid.append(row.copy())
    return grid
Check out the latest blogs from LambdaTest on this topic:
When most firms employed a waterfall development model, it was widely joked about in the industry that Google kept its products in beta forever. Google has been a pioneer in making the case for in-production testing. Traditionally, before a build could go live, a tester was responsible for testing all scenarios, both defined and extempore, in a testing environment. However, this concept is evolving on multiple fronts today. For example, the tester is no longer testing alone. Developers, designers, build engineers, other stakeholders, and end users, both inside and outside the product team, are testing the product and providing feedback.
“Test frequently and early.” If you’ve been following my testing agenda, you’re probably sick of hearing me repeat that. However, it makes sense that if your tests detect an issue soon after it occurs, it will be easier to resolve. This is one of the guiding concepts that makes continuous integration such an effective method. I’ve encountered several teams who have a lot of automated tests but don’t use them as part of a continuous integration approach. There are frequently various reasons why the team believes these tests cannot be used with continuous integration. Perhaps the tests take too long to run, or they are not dependable enough to provide correct results on their own, necessitating human interpretation.
In an ideal world, you can test your web application in the same test environment and return the same results every time. The reality can be difficult sometimes when you have flaky tests, which may be due to the complexity of the web elements you are trying to perform an action on in your test case.
As part of one of my consulting efforts, I worked with a mid-sized company that was looking to move toward a more agile manner of developing software. As with any shift in work style, there is some bewilderment and, for some, considerable anxiety. People are being challenged to leave their comfort zones and embrace a continuously changing, dynamic working environment. And, dare I say it, testing may be the most ‘disturbed’ of the software roles in agile development.
Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. LambdaTest Learning Hubs compile a list of step-by-step guides to help you be proficient with different test automation frameworks i.e. Selenium, Cypress, TestNG etc.
You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.
Get 100 minutes of automation test minutes FREE!!