Best Python code snippets using mamba
config.py
Source: config.py
from __future__ import annotations

__docformat__ = "google"
__doc__ = """"""

from collections import defaultdict
from types import ModuleType
from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union
from dataclasses import dataclass, field
from toolz import pipe
from dataclasses_jsonschema import JsonSchemaMixin

import unittest
import logging
import os
import yaml
import inspect

from .smuggle import smuggle
from .spec import Spec

T = TypeVar('T')
TestPath = str
AuxiliaryPath = str
UseCase = str
LastResult = str
TestId = str
Workbench = str
Device = str
Suite = str
FunctionalityItem = str
Toggle = bool
KalashYamlObj = Dict[str, Any]
ArbitraryYamlObj = Dict[str, Any]
ConstructorArgsTuple = Tuple[Any, ...]
TestModule = ModuleType
TemplateVersion = str
OneOrList = Union[List[T], T]

# Please document type aliases below:

__doc__ += """
Module containing the entire configuration data model for Kalash

Type Aliases:

* `TestPath` = `str`
* `AuxiliaryPath` = `str`
* `UseCase` = `str`
* `LastResult` = `str`
* `TestId` = `str`
* `Workbench` = `str`
* `Device` = `str`
* `Suite` = `str`
* `FunctionalityItem` = `str`
* `Toggle` = `bool`
* `KalashYamlObj` = `Dict[str, Any]`
* `ArbitraryYamlObj` = `Dict[str, Any]`
* `ConstructorArgsTuple` = `Tuple[Any, ...]`
* `TestModule` = `ModuleType`
* `TemplateVersion` = `str`
* `OneOrList` = `Union[List[T], T]`
"""


@dataclass
class CliConfig:
    """A class collecting all CLI options fed into
    the application. The instance is created by the
    main function and used downstream in the call stack.

    Args:
        file (Optional[str]): config filename (YAML or Python file)
        log_dir (str): base directory for log files
        group_by (Optional[str]): group logs by a particular property
            from the metadata tag
        no_recurse (bool): don't recurse into subfolders when scanning
            for tests to run
        debug (bool): run in debug mode
        no_log (bool): suppress logging
        no_log_echo (bool): suppress log echoing to STDOUT
        spec_path (str): custom YAML/Meta specification path, the file
            should be in YAML format
        log_level (int): `logging` module log level
        log_format (str): formatter string for `logging` module logger
        what_if (Optional[str]): either 'ids' or 'paths', prints hypothetical
            list of IDs or paths of collected tests instead of running the
            actual tests, useful for debugging and designing test suites
        fail_fast (bool): if `True` the test suite won't be continued if
            at least one of the tests that have been collected and triggered
            has failed
    """
    file: Optional[str] = None
    # if not running in CLI context we initialize reasonable defaults:
    log_dir: str = '.'
    group_by: Optional[str] = None
    no_recurse: bool = False
    debug: bool = False
    no_log: bool = False
    no_log_echo: bool = False
    spec_path: str = 'spec.yaml'
    log_level: int = logging.INFO
    log_format: str = '%(message)s'
    what_if: Optional[str] = None
    fail_fast: bool = False

    def __post_init__(self):
        spec_abspath = os.path.join(os.path.dirname(__file__), self.spec_path)
        self.spec = Spec.load_spec(spec_abspath)
        self.log_format = self.spec.cli_config.log_formatter


class classproperty(object):
    """https://stackoverflow.com/a/13624858
    Only Python 3.9 allows stacking `@classmethod`
    and `@property` decorators to obtain static
    properties. We use this decorator as a workaround
    since we wish to support 3.7+ for quite a while.
    """
    def __init__(self, fget):
        self.fget = fget

    def __get__(self, owner_self, owner_cls):
        return self.fget(owner_cls)


@dataclass
class SharedMetaElements:
    """Collects Metadata-modifying methods with `CliConfig` instance
    providing a parameter closure. Most methods here are related
    to built-in interpolation of patterns like `$(WorkDir)`.
    """

    cli_config: CliConfig

    def _interpolate_workdir(self, ipt: str) -> str:
        """Interpolates CWD variable. This variable is used to
        resolve paths within Kalash YAML relative to the current
        working directory. Equivalent to using the dotted file path.

        Args:
            ipt (str): input string to interpolate
            yaml_abspath (str): path to the Kalash YAML file.

        Returns: interpolated string
        """
        return os.path.normpath(
            ipt.replace(
                self.cli_config.spec.test.interp_cwd, os.getcwd()
            )
        )

    def _interpolate_this_file(self, ipt: str, yaml_abspath: str) -> str:
        """Interpolates THIS_FILE variable. THIS_FILE is used to resolve
        paths within Kalash YAML relative to the YAML file itself.

        Args:
            ipt (str): input string to interpolate
            yaml_abspath (str): path to the Kalash YAML file
                or the `.py` configuration file

        Returns: interpolated string
        """
        return os.path.normpath(
            ipt.replace(
                self.cli_config.spec.test.interp_this_file,
                os.path.dirname(yaml_abspath)
            )
        )

    def _interpolate_all(self, ipt: Union[str, None], yaml_abspath: str) -> Union[str, None]:
        """Interpolates all variable values using a toolz.pipe

        Args:
            ipt (str): input string to interpolate
            yaml_abspath (str): path to the Kalash YAML file
                or the `.py` configuration file

        Returns: interpolated string
        """
        if ipt:
            return pipe(
                self._interpolate_this_file(ipt, yaml_abspath),
                self._interpolate_workdir
            )
        return ipt

    def resolve_interpolables(self, o: object, yaml_abspath: str):
        for k, v in o.__dict__.items():
            if type(v) is str:
                setattr(o, k, self._interpolate_all(v, yaml_abspath))


@dataclass
class Base:
    """Base config class. `Meta`, `Config` and `Test`
    inherit from this minimal pseudo-abstract base class.
    """
    @classmethod
    def from_yaml_obj(cls, yaml_obj: ArbitraryYamlObj, cli_config: CliConfig) -> Base:
        raise NotImplementedError("Base class methods should be overridden")

    def get(self, argname: str):
        """`getattr` alias for those who wish to use this
        from within the `TestCase` class.
        """
        return getattr(self, argname, None)


@dataclass
class Meta(Base, JsonSchemaMixin):
    """Provides a specification outline for the Metadata tag
    in test templates.

    Args:
        id (Optional[TestId]): unique test ID
        version (Optional[TemplateVersion]): template version
        use_cases (Optional[OneOrList[UseCase]]): one or more
            use case IDs (preferably from a task tracking system
            like Jira) that a particular test refers to
        workbenches (Optional[OneOrList[Workbench]]): one or more
            physical workbenches where the test should be triggered
        devices (Optional[OneOrList[Device]]): one or more device
            categories for which this test has been implemented
        suites (Optional[OneOrList[Suite]]): one or more arbitrary
            suite tags (should be used only if remaining tags don't
            provide enough possibilities to describe the context of
            the test script)
        functionality (Optional[OneOrList[FunctionalityItem]]): one
            or more functionality descriptors for the test script
    """
    id: Optional[TestId] = None
    version: Optional[TemplateVersion] = None
    use_cases: Optional[OneOrList[UseCase]] = None
    workbenches: Optional[OneOrList[Workbench]] = None
    devices: Optional[OneOrList[Device]] = None
    suites: Optional[OneOrList[Suite]] = None
    functionality: Optional[OneOrList[FunctionalityItem]] = None
    cli_config: CliConfig = CliConfig()

    def __post_init__(self):
        frame = inspect.stack()[1]
        module = inspect.getmodule(frame[0])
        if module:
            module_path = os.path.abspath(module.__file__)
            SharedMetaElements(self.cli_config).resolve_interpolables(self, module_path)

    @classmethod
    def from_yaml_obj(cls, yaml_obj: ArbitraryYamlObj, cli_config: CliConfig) -> Meta:
        block_spec = cli_config.spec.test
        meta_spec = cli_config.spec.meta
        params = dict(
            id=yaml_obj.get(block_spec.id, None),
            version=yaml_obj.get(meta_spec.template_version, None),
            use_cases=yaml_obj.get(meta_spec.related_usecase, None),
            workbenches=yaml_obj.get(meta_spec.workbench, None),
            devices=yaml_obj.get(block_spec.devices, None),
            suites=yaml_obj.get(block_spec.suites, None),
            functionality=yaml_obj.get(block_spec.functionality, None)
        )
        return Meta(
            **params
        )


@dataclass
class Test(Meta, JsonSchemaMixin):
    """Provides a specification outline for a single category
    of tests that should be collected, e.g. by path, ID or any
    other parameter inherited from `Meta`.

    Args:
        path (Optional[OneOrList[TestPath]]): path to a test
            directory or a single test path
        id (Optional[OneOrList[TestId]]): one or more IDs to
            filter for
        no_recurse (Optional[Toggle]): if `True`, subfolders
            will not be searched for tests, intended for use with
            the `path` parameter
        last_result (Optional[LastResult]): if `OK` then filters
            out only the tests that have passed in the last run,
            if `NOK` then it only filters out those tests that
            have failed in the last run
        setup (Optional[AuxiliaryPath]): path to a setup script;
            runs once at the start of the test category run
        teardown (Optional[AuxiliaryPath]): path to a teardown
            script; runs once at the end of the test category
            run
    """
    path: Optional[OneOrList[TestPath]] = None
    id: Optional[OneOrList[TestId]] = None
    no_recurse: Optional[Toggle] = None
    last_result: Optional[LastResult] = None
    setup: Optional[AuxiliaryPath] = None
    teardown: Optional[AuxiliaryPath] = None
    cli_config: CliConfig = CliConfig()

    def __post_init__(self):
        frame = inspect.stack()[1]
        module = inspect.getmodule(frame[0])
        if module:
            module_path = os.path.abspath(module.__file__)
            SharedMetaElements(self.cli_config).resolve_interpolables(self, module_path)

    @classmethod
    def from_yaml_obj(cls, yaml_obj: ArbitraryYamlObj, cli_config: CliConfig) -> Test:
        """Loads `Test` blocks from a YAML object."""

        block_spec = cli_config.spec.test
        base_class_instance = super().from_yaml_obj(yaml_obj, cli_config)
        return Test(
            path=yaml_obj.get(block_spec.path, None),
            no_recurse=yaml_obj.get(block_spec.no_recurse, None),
            last_result=yaml_obj.get(block_spec.last_result, None),
            setup=yaml_obj.get(block_spec.setup_script, None),
            teardown=yaml_obj.get(block_spec.teardown_script, None),
            **base_class_instance.__dict__
        )

    @classproperty
    def _non_filters(cls):
        # ID is listed as non-filter because it's handled
        # differently. A `Test` definition can filter for
        # multiple IDs. A `Meta` definition can only have
        # one ID (1 ID == 1 test case). Hence ID is handled
        # separately in the `apply_filters` function using
        # `match_id` helper
        return ['setup', 'teardown', 'path', 'id']


@dataclass
class Config(Base, JsonSchemaMixin):
    """Provides a specification outline for the runtime
    parameters. Where `Test` defines what tests to collect,
    this class defines global parameters determining how
    to run tests.

    Args:
        report (str): directory path where reports will
            be stored in XML format
        setup (Optional[AuxiliaryPath]): path to a setup script;
            runs once at the start of the complete run
        teardown (Optional[AuxiliaryPath]): path to a teardown
            script; runs once at the end of the complete run
    """
    report: str = './kalash_reports'
    setup: Optional[AuxiliaryPath] = None
    teardown: Optional[AuxiliaryPath] = None
    cli_config: CliConfig = CliConfig()

    def __post_init__(self):
        SharedMetaElements(self.cli_config).resolve_interpolables(self, __file__)

    @classmethod
    def from_yaml_obj(cls, yaml_obj: Optional[ArbitraryYamlObj], cli_config: CliConfig) -> Config:
        """Loads `Test` blocks from a YAML object."""
        config_spec = cli_config.spec.config
        if yaml_obj:
            return Config(
                yaml_obj.get(config_spec.report, None),
                yaml_obj.get(config_spec.one_time_setup_script, None),
                yaml_obj.get(config_spec.one_time_teardown_script, None)
            )
        else:
            return Config()


@dataclass
class Trigger(JsonSchemaMixin):
    """Main configuration class collecting all information for
    a test run, passed down throughout the whole call stack.

    Args:
        tests (List[Test]): list of `Test` categories, each
            describing a sliver of a test suite that shares certain
            test collection parameters
        config (Config): a `Config` object defining parameters
            telling Kalash *how* to run the tests
        cli_config (CliConfig): a `CliConfig` object representing
            command-line parameters used to trigger the test run
            modifying behavior of certain aspects of the application
            like logging or triggering speculative runs instead of
            real runs
    """
    tests: List[Test] = field(default_factory=list)
    config: Config = field(default_factory=lambda: Config())
    cli_config: CliConfig = field(default_factory=lambda: CliConfig())

    @classmethod
    def from_file(cls, file_path: str, cli_config: CliConfig):
        """Creates a `Trigger` instance from a YAML or JSON file."""
        with open(file_path, 'r') as f:
            yaml_obj: ArbitraryYamlObj = defaultdict(lambda: None, yaml.safe_load(f))
            list_blocks: List[ArbitraryYamlObj] = \
                yaml_obj[cli_config.spec.test.tests]
            cfg_section: ArbitraryYamlObj = yaml_obj[cli_config.spec.config.cfg]
            tests = [Test.from_yaml_obj(i, cli_config) for i in list_blocks]
            config = Config.from_yaml_obj(cfg_section, cli_config)
            return Trigger(tests, config, cli_config)

    def _resolve_interpolables(self, path: str):
        sm = SharedMetaElements(self.cli_config)
        for test in self.tests:
            sm.resolve_interpolables(test, path)
        sm.resolve_interpolables(self.config, path)

    @classmethod
    def infer_trigger(cls, cli_config: CliConfig, default_path: str = '.kalash.yaml'):
        """Creates the Trigger instance from a YAML file or
        a Python file.

        Args:
            path (str): path to the configuration file.

        Returns: `Tests` object
        """
        path = cli_config.file if cli_config.file else default_path
        if path.endswith('.yaml') or path.endswith('.json'):
            t = cls()
            t = Trigger.from_file(os.path.abspath(path), cli_config)
            t._resolve_interpolables(path)
            return t
        else:
            module = smuggle(os.path.abspath(path))
            for _, v in module.__dict__.items():
                if type(v) is cls:
                    v._resolve_interpolables(path)
                    return v
            else:
                raise ValueError(
                    f"No {cls.__name__} instance found in file {path}"
                )


PathOrIdForWhatIf = List[str]
CollectorArtifact = Tuple[unittest.TestSuite, PathOrIdForWhatIf]  # can be a list of IDs or paths
                                                                  # or a full test suite
Collector = Callable[[TestPath, Trigger], CollectorArtifact]

__doc__ += """
* `PathOrIdForWhatIf` = `List[str]`
* `CollectorArtifact` = `Tuple[unittest.TestSuite, PathOrIdForWhatIf]`
* `Collector` = `Callable[[TestPath, Trigger], CollectorArtifact]`
...
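These classes are normally consumed from a trigger file that `Trigger.infer_trigger` loads: a YAML/JSON file parsed by `Trigger.from_file`, or a Python module that simply defines a `Trigger` instance. The sketch below shows the Python flavor under stated assumptions: the import path `kalash.config`, the test directory `./tests/smoke`, and the IDs are hypothetical placeholders, not values taken from the snippet above.

# Hypothetical kalash Python configuration file (import path assumed).
from kalash.config import Trigger, Test, Config, CliConfig

trigger = Trigger(
    tests=[
        # Collect every test found under a directory (hypothetical path):
        Test(path="./tests/smoke"),
        # Collect only specific test IDs (hypothetical IDs):
        Test(id=["test_001", "test_002"]),
    ],
    config=Config(report="./kalash_reports"),
    # `what_if='ids'` makes the run speculative: collected IDs are printed
    # instead of actually executing the tests.
    cli_config=CliConfig(what_if="ids"),
)

`infer_trigger` scans the loaded module's namespace for a `Trigger` instance, so any top-level name works; the variable name `trigger` is just a convention here.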
copr_distgit_client.py
Source: copr_distgit_client.py
"""
copr-distgit-client code, moved to module to simplify unit-testing
"""
import argparse
import configparser
import errno
import glob
import logging
import pipes
import os
import subprocess
import sys
from six.moves.urllib.parse import urlparse

try:
    from rpmautospec import (
        specfile_uses_rpmautospec as rpmautospec_used,
        process_distgit as rpmautospec_expand,
    )
except ImportError:
    rpmautospec_used = lambda _: False

def log_cmd(command, comment="Running command"):
    """ Dump the command to stderr so it can be c&p to shell """
    command = ' '.join([pipes.quote(x) for x in command])
    logging.info("%s: %s", comment, command)

def check_output(cmd, comment="Reading stdout from command"):
    """ el6 compatible subprocess.check_output() """
    log_cmd(cmd, comment)
    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, _) = process.communicate()
    if process.returncode:
        raise RuntimeError("Exit non-zero: {0}".format(process.returncode))
    return stdout

def call(cmd, comment="Calling"):
    """ wrap sp.call() with logging info """
    log_cmd(cmd, comment)
    return subprocess.call(cmd)

def check_call(cmd, comment="Checked call"):
    """ wrap sp.check_call() with logging info """
    log_cmd(cmd, comment)
    subprocess.check_call(cmd)

def _load_config(directory):
    config = configparser.ConfigParser()
    files = glob.glob(os.path.join(directory, "*.ini"))
    logging.debug("Files %s in config directory %s", files, directory)
    config.read(files)
    config_dict = {
        "instances": {},
        "clone_host_map": {},
    }
    instances = config_dict["instances"]
    for section_name in config.sections():
        section = config[section_name]
        instance = instances[section_name] = {}
        for key in section.keys():
            # array-like config options
            if key in ["clone_hostnames", "path_prefixes"]:
                hostnames = section[key].split()
                instance[key] = [h.strip() for h in hostnames]
            else:
                instance[key] = section[key]
        for key in ["sources", "specs"]:
            if key in instance:
                continue
            instance[key] = "."
        if "sources_file" not in instance:
            instance["sources_file"] = "sources"
        if "default_sum" not in instance:
            instance["default_sum"] = "md5"
        for host in instance["clone_hostnames"]:
            if host not in config_dict["clone_host_map"]:
                config_dict["clone_host_map"][host] = {}
            host_dict = config_dict["clone_host_map"][host]
            for prefix in instance.get("path_prefixes", ["DEFAULT"]):
                if prefix in host_dict:
                    msg = "Duplicate prefix {0} for {1} hostname".format(
                        prefix, host,
                    )
                    raise RuntimeError(msg)
                host_dict[prefix] = instance
    return config_dict

def download(url, filename):
    """ Download URL as FILENAME using curl command """
    command = [
        "curl",
        "-H", "Pragma:",
        "-o", filename,
        "--location",
        "--remote-time",
        "--show-error",
        "--fail",
        url,
    ]
    if call(command):
        raise RuntimeError("Can't download file {0}".format(filename))

def mkdir_p(path):
    """ mimic 'mkdir -p <path>' command """
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise

def download_file_and_check(url, params, distgit_config):
    """ Download given URL (if not yet downloaded), and try the checksum """
    filename = params["filename"]
    sum_binary = params["hashtype"] + "sum"
    mkdir_p(distgit_config["sources"])
    if not os.path.exists(filename):
        logging.info("Downloading %s", filename)
        download(url, filename)
    else:
        logging.info("File %s already exists", filename)
    sum_command = [sum_binary, filename]
    output = check_output(sum_command).decode("utf-8")
    checksum, _ = output.strip().split()
    if checksum != params["hash"]:
        raise RuntimeError("Check-sum {0} is wrong, expected: {1}".format(
            checksum,
            params["hash"],
        ))

def _detect_clone_url():
    git_config = ".git/config"
    if not os.path.exists(git_config):
        msg = "{0} not found, $PWD is not a git repository".format(git_config)
        raise RuntimeError(msg)
    git_conf_reader = configparser.ConfigParser()
    git_conf_reader.read(git_config)
    return git_conf_reader['remote "origin"']["url"]

def get_distgit_config(config, forked_from=None):
    """
    Given the '.git/config' file from current directory, return the
    appropriate part of dist-git configuration.
    Returns tuple: (urlparse(clone_url), distgit_config)
    """
    url = forked_from
    if not url:
        url = _detect_clone_url()
    parsed_url = urlparse(url)
    if parsed_url.hostname is None:
        hostname = "localhost"
    else:
        hostname = parsed_url.hostname
    prefixes = config["clone_host_map"][hostname]
    prefix_found = None
    for prefix in prefixes.keys():
        if not parsed_url.path.startswith(prefix):
            continue
        prefix_found = prefix
    if not prefix_found:
        if "DEFAULT" not in prefixes:
            raise RuntimeError("Path {0} does not match any of 'path_prefixes' "
                               "for '{1}' hostname".format(parsed_url.path,
                                                           hostname))
        prefix_found = "DEFAULT"
    return parsed_url, prefixes[prefix_found]

def get_spec(distgit_config):
    """
    Find the specfile name inside distgit_config["specs"] directory
    """
    spec_dir = distgit_config["specs"]
    specfiles = glob.glob(os.path.join(spec_dir, '*.spec'))
    if len(specfiles) != 1:
        abs_spec_dir = os.path.join(os.getcwd(), spec_dir)
        message = "Exactly one spec file expected in {0} directory, {1} found".format(
            abs_spec_dir, len(specfiles),
        )
        raise RuntimeError(message)
    specfile = os.path.basename(specfiles[0])
    return specfile

def sources(args, config):
    """
    Locate the sources, and download them from the appropriate dist-git
    lookaside cache.
    """
    parsed_url, distgit_config = get_distgit_config(config, args.forked_from)
    namespace = parsed_url.path.lstrip('/').split('/')
    # drop the last {name}.git part
    repo_name = namespace.pop()
    if repo_name.endswith(".git"):
        repo_name = repo_name[:-4]
    namespace = list(reversed(namespace))
    output = check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    output = output.decode("utf-8").strip()
    if output == "HEAD":
        output = check_output(["git", "rev-parse", "HEAD"])
        output = output.decode("utf-8").strip()
    refspec = output
    specfile = get_spec(distgit_config)
    name = specfile[:-5]
    sources_file = distgit_config["sources_file"].format(name=name)
    if not os.path.exists(sources_file):
        raise RuntimeError("{0} file not found".format(sources_file))
    logging.info("Reading sources specification file: %s", sources_file)
    with open(sources_file, 'r') as sfd:
        while True:
            line = sfd.readline()
            if not line:
                break
            kwargs = {
                "name": repo_name,
                "refspec": refspec,
                "namespace": namespace,
            }
            source_spec = line.split()
            if len(source_spec) == 2:
                # old md5/sha1 format: 0ced6f20b9fa1bea588005b5ad4b52c1 tar-1.26.tar.xz
                kwargs["hashtype"] = distgit_config["default_sum"].lower()
                kwargs["hash"] = source_spec[0]
                kwargs["filename"] = source_spec[1]
            elif len(source_spec) == 4:
                # SHA512 (tar-1.30.tar.xz.sig) = <HASH>
                kwargs["hashtype"] = source_spec[0].lower()
                kwargs["hash"] = source_spec[3]
                filename = os.path.basename(source_spec[1])
                kwargs["filename"] = filename.strip('()')
            else:
                msg = "Weird sources line: {0}".format(line)
                raise RuntimeError(msg)
            url_file = '/'.join([
                distgit_config["lookaside_location"],
                distgit_config["lookaside_uri_pattern"].format(**kwargs)
            ])
            download_file_and_check(url_file, kwargs, distgit_config)

def handle_autospec(spec_abspath, spec_basename, args):
    """
    When %auto* macros are used in SPEC_ABSPATH, expand them into a separate
    spec file within ARGS.OUTPUTDIR, and return the absolute filename of the
    specfile. When %auto* macros are not used, return SPEC_ABSPATH unchanged.
    """
    result = spec_abspath
    if rpmautospec_used(spec_abspath):
        git_dir = check_output(["git", "rev-parse", "--git-dir"])
        git_dir = git_dir.decode("utf-8").strip()
        if os.path.exists(os.path.join(git_dir, "shallow")):
            # Hack. The rpmautospec doesn't support shallow clones:
            # https://pagure.io/fedora-infra/rpmautospec/issue/227
            logging.info("rpmautospec requires full clone => --unshallow")
            check_call(["git", "fetch", "--unshallow"])
        # Expand the %auto* macros, and create the separate spec file in the
        # output directory.
        output_spec = os.path.join(args.outputdir, spec_basename)
        rpmautospec_expand(spec_abspath, output_spec)
        result = output_spec
    return result

def srpm(args, config):
    """
    Using the appropriate dist-git configuration, generate source RPM
    file. This requires running 'def sources()' first.
    """
    _, distgit_config = get_distgit_config(config, args.forked_from)
    cwd = os.getcwd()
    sources_dir = os.path.join(cwd, distgit_config["sources"])
    specs = os.path.join(cwd, distgit_config["specs"])
    spec = get_spec(distgit_config)
    mkdir_p(args.outputdir)
    spec_abspath = os.path.join(specs, spec)
    spec_abspath = handle_autospec(spec_abspath, spec, args)
    if args.mock_chroot:
        command = [
            "mock", "--buildsrpm",
            "-r", args.mock_chroot,
            "--spec", spec_abspath,
            "--sources", sources_dir,
            "--resultdir", args.outputdir,
        ]
    else:
        command = [
            "rpmbuild", "-bs", spec_abspath,
            "--define", "dist %nil",
            "--define", "_sourcedir {0}".format(sources_dir),
            "--define", "_srcrpmdir {0}".format(args.outputdir),
            "--define", "_disable_source_fetch 1",
        ]
    if args.dry_run or 'COPR_DISTGIT_CLIENT_DRY_RUN' in os.environ:
        log_cmd(command, comment="Dry run")
    else:
        check_call(command)

def _get_argparser():
    parser = argparse.ArgumentParser(prog="copr-distgit-client",
                                     description="""\
A simple, configurable python utility that is able to download sources from
various dist-git instances, and generate source RPMs.
The utility is able to automatically map the "origin" .git/config clone URL
(or --forked-from URL, if specified) to a corresponding dist-git instance
configured in /etc/copr-distgit-client directory.
""")
    # main parser
    default_confdir = os.environ.get("COPR_DISTGIT_CLIENT_CONFDIR",
                                     "/etc/copr-distgit-client")
    parser.add_argument(
        "--configdir", default=default_confdir,
        help="Where to load configuration files from")
    parser.add_argument(
        "--loglevel", default="info",
        help="Python logging level, e.g. debug, info, error")
    parser.add_argument(
        "--forked-from",
        metavar="CLONE_URL",
        help=("Specify that this git clone directory is a dist-git repository "
              "fork. If used, the default clone url detection from the "
              ".git/config file is disabled and CLONE_URL is used instead. "
              "This specified CLONE_URL is used to detect the appropriate "
              "lookaside cache pattern to download the sources."))
    subparsers = parser.add_subparsers(
        title="actions", dest="action")
    # sources parser
    subparsers.add_parser(
        "sources",
        description=(
            "Using the 'url' .git/config, detect where the right DistGit "
            "lookaside cache exists, and download the corresponding source "
            "files."),
        help="Download sources from the lookaside cache")
    # srpm parser
    srpm_parser = subparsers.add_parser(
        "srpm",
        help="Generate a source RPM",
        description=(
            "Generate a source RPM from the downloaded source files "
            "by 'sources' command, please run 'sources' first."),
    )
    srpm_parser.add_argument(
        "--outputdir",
        default="/tmp",
        help="Where to store the resulting source RPM")
    srpm_parser.add_argument(
        "--mock-chroot",
        help=("Generate the SRPM in mock buildroot instead of on host. The "
              "argument is passed down to mock as the 'mock -r|--root' "
              "argument."),
    )
    srpm_parser.add_argument(
        "--dry-run", action="store_true",
        help=("Don't produce the SRPM, just print the command which would be "
              "otherwise called"),
    )
    return parser

def main():
    """ The entrypoint for the whole logic """
    args = _get_argparser().parse_args()
    logging.basicConfig(
        level=getattr(logging, args.loglevel.upper()),
        format="%(levelname)s: %(message)s",
    )
    config = _load_config(args.configdir)
    try:
        if args.action == "srpm":
            srpm(args, config)
        else:
            sources(args, config)
    except RuntimeError as err:
        logging.error("%s", err)...
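The same module backs the `copr-distgit-client` command line (see `_get_argparser` above). A rough sketch of driving it programmatically follows; it assumes the module is importable as `copr_distgit_client`, that the working directory is a dist-git clone containing exactly one `.spec` file, and that the default config directory holds the instance `*.ini` files.

# Minimal driver sketch (module name and paths assumed, not part of the snippet).
import logging
from copr_distgit_client import _get_argparser, _load_config, sources, srpm

logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")

# Parse the per-instance *.ini files from the configuration directory.
config = _load_config("/etc/copr-distgit-client")

# Download the tarballs listed in the "sources" file from the lookaside cache.
sources(_get_argparser().parse_args(["sources"]), config)

# Build the SRPM; --dry-run only logs the rpmbuild/mock command instead of running it.
srpm(_get_argparser().parse_args(["srpm", "--outputdir", "/tmp/srpms", "--dry-run"]), config)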
example_collector_spec.py
Source: example_collector_spec.py
...
from mamba import example, example_group, loader
from mamba.example_collector import ExampleCollector

def spec_relpath(name):
    return os.path.join('spec', 'fixtures', name)

def spec_abspath(name):
    return os.path.join(os.path.dirname(__file__), 'fixtures', name)

def example_names(examples):
    return [example.name for example in examples]

IRRELEVANT_PATH = spec_abspath('without_inner_contexts.py')
PENDING_DECORATOR_PATH = spec_abspath('with_pending_decorator.py')
PENDING_DECORATOR_AS_ROOT_PATH = spec_abspath('with_pending_decorator_as_root.py')
WITH_RELATIVE_IMPORT_PATH = spec_abspath('with_relative_import.py')
WITH_TAGS_PATH = spec_abspath('with_tags.py')
WITH_FOCUS_PATH = spec_abspath('with_focus.py')
SHARED_CONTEXT_PATH = spec_abspath('with_shared_context.py')
INCLUDED_CONTEXT_PATH = spec_abspath('with_included_context.py')

def _load_module(path):
    example_collector = ExampleCollector([path])
    return list(example_collector.modules())[0]

with description(ExampleCollector):
    with context('when loading from file'):
        with it('loads module from absolute path'):
            module = _load_module(IRRELEVANT_PATH)
            expect(inspect.ismodule(module)).to(be_true)
        with it('loads module from relative path'):
            module = _load_module(spec_relpath('without_inner_contexts.py'))
            expect(inspect.ismodule(module)).to(be_true)
    # FIXME: Mixed responsibilities in test [collect, load]??
    with context('when loading'):
        with it('orders examples by line number'):
            module = _load_module(spec_abspath('without_inner_contexts.py'))
            examples = loader.Loader().load_examples_from(module)
            expect(examples).to(have_length(1))
            expect(example_names(examples[0].examples)).to(equal(['it first example', 'it second example', 'it third example']))
        with it('places examples together and groups at the end'):
            module = _load_module(spec_abspath('with_inner_contexts.py'))
            examples = loader.Loader().load_examples_from(module)
            expect(examples).to(have_length(1))
            expect(example_names(examples[0].examples)).to(equal(['it first example', 'it second example', 'it third example', '#inner_context']))
    with context('when reading tags'):
        with it('reads tags from description parameters'):
            module = _load_module(WITH_TAGS_PATH)
            examples = loader.Loader().load_examples_from(module)
            expect(examples).to(have_length(1))
            expect(examples[0].has_tag('integration')).to(be_true)
        with it('reads tags from spec parameters'):
            module = _load_module(WITH_TAGS_PATH)
            examples = loader.Loader().load_examples_from(module)
            spec = next(iter(examples[0]))
            expect(spec).not_to(be_none)...
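For comparison, here is a tiny self-contained mamba spec of our own (the file name `calculator_spec.py` and its contents are hypothetical, not part of the snippet above). It uses the same description/context/it structure that the ExampleCollector discovers and loads, and can be run with `mamba calculator_spec.py`.

from mamba import description, context, it
from expects import expect, equal

with description('a calculator'):
    with context('when adding two integers'):
        with it('returns their sum'):
            # mamba executes this block as a single example
            expect(1 + 2).to(equal(3))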