Best Python code snippet using autotest_python
base_job.py
Source: base_job.py
...
                        dictionary[key])
                )
        finally:
            self._keyval_container[path][1].append(" ...\n")
            self._write_keyval()

    def _write_reports(self):
        """
        Write TAP reports to file.
        """
        for key in self._reports_container.keys():
            if key == 'root':
                sub_dir = ''
            else:
                sub_dir = key
            tap_fh = open(os.sep.join(
                [self.resultdir, sub_dir, self.global_filename]
            ) + ".tap", 'w')
            tap_fh.write('1..' + str(len(self._reports_container[key])) + '\n')
            tap_fh.writelines(self._reports_container[key])
            tap_fh.close()

    def _write_keyval(self):
        """
        Write the self._keyval_container key values to a file.
        """
        for path in self._keyval_container.keys():
            tap_fh = open(path + ".tap", 'w')
            tap_fh.write('1..' + str(self._keyval_container[path][0]) + '\n')
            tap_fh.writelines(self._keyval_container[path][1])
            tap_fh.close()

    def write(self):
        """
        Write the TAP reports to files.
        """
        self._write_reports()

    def _write_tap_archive(self):
...
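The excerpt above only shows the writer side; the surrounding autotest class that fills _reports_container is cut off. As a rough illustration of the on-disk TAP format it produces, here is a minimal, hypothetical stand-in (the class name, the record() helper and the temporary directory are invented for this sketch) that emits the same "1..N" plan line followed by the collected result lines:

# Hypothetical stand-in mirroring the attributes used by _write_reports()
# above; the real autotest report class is not shown here.
import os
import tempfile


class MiniTapReport:
    global_filename = 'results'

    def __init__(self, resultdir):
        self.resultdir = resultdir
        self._reports_container = {'root': []}

    def record(self, ok, description, key='root'):
        # Collect one TAP line per recorded result.
        lines = self._reports_container.setdefault(key, [])
        status = 'ok' if ok else 'not ok'
        lines.append(f'{status} {len(lines) + 1} - {description}\n')

    def write(self):
        # Same shape as _write_reports() above: a "1..N" plan line,
        # then the collected TAP result lines.
        for key, lines in self._reports_container.items():
            sub_dir = '' if key == 'root' else key
            path = os.sep.join([self.resultdir, sub_dir,
                                self.global_filename]) + '.tap'
            with open(path, 'w') as tap_fh:
                tap_fh.write(f'1..{len(lines)}\n')
                tap_fh.writelines(lines)


with tempfile.TemporaryDirectory() as tmp:
    report = MiniTapReport(tmp)
    report.record(True, 'boot test')
    report.record(False, 'network test')
    report.write()
    print(open(os.path.join(tmp, 'results.tap')).read())
    # 1..2
    # ok 1 - boot test
    # not ok 2 - network test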
repo.py
Source: repo.py
...
        text = text[len(prefix):]
        lines.append(prefix)
    lines.append(text)
    return ('\n' + indent).join(lines)


def _write_keyval(stream: TextIO, key: str, value: Any,
                  split: Optional[str] = None):
    if not isinstance(value, str):
        if isinstance(value, bool):
            value = str(value).lower()
        else:
            value = str(value)

    # Skip field with empty value
    if not value:
        return

    text = f'{key}: {value}'
    if split is None:
        stream.write(text + '\n')
        return

    first = True
    for line in text.split('\n'):
        wrapped_line = wrap_str(line if first else ' ' + line,
                                indent=' ', split_token=split)
        stream.write(wrapped_line + '\n')
        first = False


def yaml_load(filename: str):
    """
    helper: load yaml file with BaseLoader
    """
    return yaml.load(open(filename, 'rb').read(), Loader=yaml.BaseLoader)


def _init_logger(log_file: str) -> logging.Logger:
    """
    Init logger to print to *log_file*.
    Files rotate every day, and are kept for a month.
    Args:
        log_file: name of the file where the logs are stored.
    """
    log_handler = logging.handlers.TimedRotatingFileHandler(log_file,
                                                            when='D',
                                                            backupCount=30)
    formatter = logging.Formatter("%(asctime)s: %(levelname)s: %(message)s",
                                  "%Y-%m-%d %H:%M:%S")
    log_handler.setFormatter(formatter)
    logger = logging.getLogger(os.path.abspath(log_file))
    logger.addHandler(log_handler)
    logger.setLevel(logging.INFO)
    return logger


class _BinPkg:
    def __init__(self, name: str = '', data: Optional[Dict[str, Any]] = None):
        self.name = name
        self._data = {}
        if not data:
            return
        # Copy with unfolding of the value of each field
        for key, val in data.items():
            self._data[key] = val.replace('\n .\n ', '\n').replace('\n ', '')

    def __getattr__(self, key: str) -> Any:
        try:
            return self._data[key]
        except KeyError as error:
            raise AttributeError(f'unknown attribute {key}') from error

    def update(self, data: Dict[str, Any]):
        """
        Update fields from key/val of data.
        """
        self._data.update(data)

    def write_keyvals(self, stream: TextIO):
        """
        Write the list of key/value pairs for the binary package at hand.
        """
        _write_keyval(stream, 'name', self.name)
        for key in ('version', 'source', 'srcsha256',
                    'ghost', 'sumsha256sums', 'depends', 'sysdepends'):
            _write_keyval(stream, key, self._data.get(key, ''))
        multiline_desc = _escape_newline(self.description)
        _write_keyval(stream, 'description', multiline_desc, split=' ')
        # Write the repo-specific fields at the end
        for key in ('filename', 'size', 'sha256'):
            _write_keyval(stream, key, self._data[key])
        stream.write('\n')

    def srcid(self) -> str:
        """
        Get a unique identifier of the associated source.
        """
        return _srcid(self.source, self.srcsha256)

    @staticmethod
    def load(pkg_path: str) -> _BinPkg:
        """
        Read an mpk file and return a _BinPkg describing it.
        Args:
            pkg_path: path to the mpk file to read.
        """
        parser = Parser()
...
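Both this file and binary_package.py below rely on the same _write_keyval() helper to serialize package metadata as "key: value" lines. The following self-contained sketch copies only its unwrapped path (the wrap_str branch is left out, and the function and field values here are illustrative only) to show the resulting record format:

# Simplified copy of the no-wrap path of _write_keyval() above, to illustrate
# the "key: value" record format and the skipping of empty fields.
import io
from typing import Any, TextIO


def write_keyval(stream: TextIO, key: str, value: Any):
    if not isinstance(value, str):
        value = str(value).lower() if isinstance(value, bool) else str(value)
    if not value:  # empty values produce no line at all
        return
    stream.write(f'{key}: {value}\n')


buf = io.StringIO()
write_keyval(buf, 'name', 'mmpack-demo')   # hypothetical package name
write_keyval(buf, 'ghost', False)          # booleans are lowercased
write_keyval(buf, 'sysdepends', '')        # empty value: nothing written
print(buf.getvalue(), end='')
# name: mmpack-demo
# ghost: false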
binary_package.py
Source: binary_package.py
...
def _metadata_folder() -> str:
    folder = 'var/lib/mmpack/metadata'
    os.makedirs(folder, exist_ok=True)
    return folder


def _write_keyval(stream: TextIO, key: str, value: Any,
                  split: Optional[str] = None):
    if not isinstance(value, str):
        if isinstance(value, bool):
            value = str(value).lower()
        else:
            value = str(value)

    # Skip field with empty value
    if not value:
        return

    text = f'{key}: {value}'
    if split is None:
        stream.write(text + '\n')
        return

    for line in text.split('\n'):
        wrapped_line = wrap_str(line, indent=' ', split_token=split)
        stream.write(wrapped_line + '\n')


def _gen_dep_list(dependencies: Dict[str, List[Version]]) -> List[str]:
    deps = []
    for dep, minmaxver in dependencies.items():
        minver = minmaxver[0]
        maxver = minmaxver[1]
        if str(minver) == str(maxver):
            if not minver.is_any():
                dep += f' (= {minver})'
            deps.append(dep)
        else:
            if not minver.is_any():
                deps.append(f'{dep} (>= {minver})')
            if not maxver.is_any():
                deps.append(f'{dep} (< {maxver})')
    return deps


def _gen_sha256sums(sha256sums_path: str):
    """
    assumed to be run with pkgdir as current dir
    """
    # Compute hashes of all installed files
    cksums = {}
    for filename in glob('**', recursive=True):
        # skip folders and MMPACK/info
        if isdir(filename) or filename == 'MMPACK/info':
            continue
        # Add file with checksum
        cksums[filename] = sha256sum(filename, follow_symlink=False)

    # Write the sha256sums file
    with open(sha256sums_path, 'wt', newline='\n') as sums_file:
        for filename in sorted(cksums):
            sums_file.write(f'{filename}: {cksums[filename]}\n')


class BinaryPackage:
    # pylint: disable=too-many-instance-attributes
    """
    Binary package class
    """

    def __init__(self, name: str, version: Version, source: str, arch: str,
                 tag: str, spec_dir: str, src_hash: str, ghost: bool):
        # pylint: disable=too-many-arguments
        self.name = name
        self.version = version
        self.source = source
        self.arch = arch
        self.tag = tag
        self.spec_dir = spec_dir
        self.src_hash = src_hash
        self.pkg_path = None
        self.ghost = ghost
        self.description = ''
        # * System dependencies are stored as opaque strings.
        #   Those are supposed to be handled by system tools,
        #   => format is a set of strings.
        # * mmpack dependencies are expressed as the triplet
        #   dependency name, min and max version (inclusive)
        #   => format is a dict {depname: [min, max], ...}
        self._dependencies = {'sysdepends': set(), 'depends': {}}
        self.provides = {}
        self.install_files = set()

    def licenses_dir(self):
        """
        return the license directory of the package
        """
        licenses_dir = f'share/licenses/{self.name}'
        os.makedirs(licenses_dir, exist_ok=True)
        return licenses_dir

    def _get_specs_provides(self) -> Dict[str, Dict[str, Version]]:
        """
        return a dict containing the specified interface of the given package
        Look for a <self.name>.provides file within the project's mmpack
        folder, load it and return its parsed values as a dictionary.
        return an empty interface dict if none was specified.
        """
        # TODO: also work with the last package published
        provide_spec_name = f'{self.spec_dir}/{self.name}.provides'
        specs_provides = dict()
        try:
            specs_provides.update(yaml_load(provide_spec_name))
            dprint('symbols read from ' + provide_spec_name)
        except FileNotFoundError:
            # return an empty dict if nothing has been provided
            pass
        return specs_provides

    def _write_basic_pkginfo(self, stream: TextIO):
        _write_keyval(stream, 'name', self.name)
        _write_keyval(stream, 'version', self.version)
        _write_keyval(stream, 'source', self.source)
        _write_keyval(stream, 'srcsha256', self.src_hash)

    def _gen_pkginfo(self, pkginfo_path: str):
        with open(pkginfo_path, 'wt',
                  newline='\n', encoding='utf-8') as stream:
            self._write_basic_pkginfo(stream)
            _write_keyval(stream, 'ghost', self.ghost)
            # preserve end of line in description by inserting ' .' lines
            multiline_desc = self.description.replace('\n', '\n .\n ')
            _write_keyval(stream, 'description', multiline_desc, split=' ')
            deps = ', '.join(_gen_dep_list(self._dependencies['depends']))
            _write_keyval(stream, 'depends', deps, split=', ')
            sysdeps = ', '.join(self._dependencies['sysdepends'])
            _write_keyval(stream, 'sysdepends', sysdeps, split=', ')

    def _gen_info(self, pkgdir: str):
        """
        This generates the info file and sha256sums. It must be the last step
        before calling _make_archive().
        """
        pushdir(pkgdir)
        metadata_folder = _metadata_folder()
        sha256sums_path = f'{metadata_folder}/{self.name}.sha256sums'
        pkginfo_path = f'{metadata_folder}/{self.name}.pkginfo'
        self._gen_pkginfo(pkginfo_path)
        _gen_sha256sums(sha256sums_path)
        sumsha256sums = sha256sum(sha256sums_path)
        with open('MMPACK/metadata', 'wt',
                  newline='\n', encoding='utf-8') as stream:
            _write_keyval(stream, 'metadata-version', METADATA_VERSION)
            self._write_basic_pkginfo(stream)
            _write_keyval(stream, 'sumsha256sums', sumsha256sums)
            _write_keyval(stream, 'pkginfo-path', './' + pkginfo_path)
            _write_keyval(stream, 'sumsha-path', './' + sha256sums_path)
        # Create info file
        info = {'version': self.version,
                'source': self.source,
                'description': self.description,
                'ghost': self.ghost,
                'srcsha256': self.src_hash,
                'sumsha256sums': sumsha256sums}
        info.update(self._dependencies)
        yaml_serialize({self.name: info}, 'MMPACK/info')
        popdir()

    def _store_provides(self, pkgdir: str):
        pushdir(pkgdir)
        metadata_folder = _metadata_folder()
        pkginfo = self.get_pkginfo()
...
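One detail worth noting is how multi-line descriptions survive the "key: value" format: _gen_pkginfo() above folds each embedded newline into a " ." continuation line, and _BinPkg.__init__() in repo.py reverses that transformation when the metadata is read back. A tiny round-trip sketch (the sample text is invented for illustration):

# Round-trip of the description folding: fold as in _gen_pkginfo(),
# unfold as in _BinPkg.__init__() in repo.py.
description = 'A demo package.\nSecond paragraph of the description.'

folded = description.replace('\n', '\n .\n ')                  # writer side
restored = folded.replace('\n .\n ', '\n').replace('\n ', '')  # reader side

assert restored == description
print(folded)
# A demo package.
#  .
#  Second paragraph of the description.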