Best Python code snippet using pytest
test_buildtools.py
Source: test_buildtools.py
1"""2NOTE: Run all tests from inside the tests folder, as: 3 >>> pytest test_buildtools.py4Suite of tests for the bash script based- build tools that we have written. 5"""6import pytest7import os 8import subprocess9from ncap_iac.utils import dev_builder10"""11Suite of tests to check that new pipelines are configured correctly using configure.sh 12"""13class ConstructBlueprintBase():14 """15 Abstract class for tests on constructing pipelines. Shares buildUp and tearDown methods to all relevant test classes. 16 """17 def tearDown(self,pathname):18 """19 Cleanup function: deletes the folder with given name after testing is done. 20 inputs:21 pathname (str): the relative name of the path where the test directory was created. 22 """23 os.remove(os.path.join(pathname,"stack_config_template.json"))24 os.rmdir(pathname)25 subprocess.call(["git","rm",pathname])26class ConstructProfileBase():27 """28 Abstract class for tests on constructing pipelines. Shares buildUp and tearDown methods to all relevant test classes. 29 """30 def tearDown(self,pathname):31 """32 Cleanup function: deletes the folder with given name after testing is done. 33 inputs:34 pathname (str): the relative name of the path where the test directory was created. 35 """36 os.remove(os.path.join(pathname,"user_config_template.json"))37 os.rmdir(pathname)38class TestConfigureBlueprint(ConstructBlueprintBase): 39 def test_basic_configure(self,pytestconfig):40 """41 Test the basic functionality of configure.sh given a file name and run from the iac_utils directory. 42 """43 44 ## Create this folder 45 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))46 self.pathname = "autogen_test_stack"47 currdir = os.getcwd()48 os.chdir("../ncap_iac/ncap_blueprints/iac_utils")49 subprocess.call(["bash","configure.sh",self.pathname])50 assert os.path.exists(os.path.join("../",self.pathname))51 assert os.path.exists(os.path.join("../",self.pathname,"stack_config_template.json"))52 os.chdir(currdir)53 self.tearDown(os.path.join("../ncap_iac/ncap_blueprints",self.pathname))54 def test_ncap_blueprints_paths_configure(self,pytestconfig):55 """56 Test the basic functionality of configure.sh given a file name and run from the ncap_blueprints directory 57 """58 ## Create this folder 59 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))60 self.pathname = "autogen_test_stack_blueprint"61 ## Try with ncap_blueprints:62 currdir = os.getcwd()63 os.chdir("../ncap_iac/ncap_blueprints")64 subprocess.call(["bash","iac_utils/configure.sh",self.pathname])65 assert os.path.exists(self.pathname)66 assert os.path.exists(os.path.join(self.pathname,"stack_config_template.json"))67 os.chdir(currdir)68 self.tearDown(os.path.join("ncap_blueprints",self.pathname))69 def test_ncap_iac_paths_configure(self,pytestconfig):70 """71 Test the basic functionality of configure.sh given a file name and run from the ncap_iac directory. 
72 """73 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))74 ## Create this folder 75 self.pathname = "autogen_test_stack_iac"76 ## Try with ncap_iac77 currdir = os.getcwd()78 subprocess.call(["bash","../ncap_iac/ncap_blueprints/iac_utils/configure.sh",self.pathname])79 assert os.path.exists(os.path.join("ncap_blueprints/",self.pathname))80 assert os.path.exists(os.path.join("ncap_blueprints/",self.pathname,"stack_config_template.json"))81 os.chdir(currdir)82 self.tearDown(os.path.join("ncap_blueprints",self.pathname))83 def test_ncap_pipeline_configure(self,pytestconfig):84 """85 Test the basic functionality of configure.sh given a file name and run from the ncap_iac directory. 86 """87 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))88 ## Create this folder 89 self.pathname = "autogen_test_stack_pipeline"90 ## Make one:91 currdir = os.getcwd()92 subprocess.call(["bash","../ncap_iac/ncap_blueprints/iac_utils/configure.sh",self.pathname])93 assert os.path.exists(os.path.join("ncap_blueprints/",self.pathname))94 assert os.path.exists(os.path.join("ncap_blueprints/",self.pathname,"stack_config_template.json"))95 os.chdir(os.path.join("../ncap_iac/ncap_blueprints/",self.pathname))96 97 ## Try from inside another pipeline folder. 98 self.pathname2 = "autogen_2_test_stack_blueprint"99 subprocess.call(["bash","../iac_utils/configure.sh",self.pathname2])100 assert os.path.exists(os.path.join("../",self.pathname2))101 assert os.path.exists(os.path.join("../",self.pathname2,"stack_config_template.json"))102 os.chdir(currdir)103 self.tearDown(os.path.join("../ncap_blueprints",self.pathname))104 self.tearDown(os.path.join("../ncap_blueprints",self.pathname2))105class TestConfigureProfile(ConstructProfileBase): 106 def test_basic_configure(self,pytestconfig):107 """108 Test the basic functionality of configure.sh given a valid file name and run from the iac_utils directory. 109 """110 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))111 ## Create this folder 112 self.pathname = "autogen-test-users"113 currdir = os.getcwd()114 os.chdir("../user_profiles/iac_utils")115 subprocess.call(["bash","configure.sh",self.pathname])116 assert os.path.exists(os.path.join("../",self.pathname))117 assert os.path.exists(os.path.join("../",self.pathname,"user_config_template.json"))118 os.chdir(currdir)119 self.tearDown(os.path.join("../user_profiles",self.pathname))120 def test_user_profiles_paths_configure(self,pytestconfig):121 """122 Test the basic functionality of configure.sh given a valid file name and run from the user_profiles directory 123 """124 os.chdir(os.path.join(pytestconfig.rootdir,"ncap_iac/tests/"))125 ## Create this folder 126 self.pathname = "autogen-test-users-profile"127 ## Try with user_profiles:128 currdir = os.getcwd()129 os.chdir("../user_profiles")130 subprocess.call(["bash","iac_utils/configure.sh",self.pathname])131 assert os.path.exists(self.pathname)132 assert os.path.exists(os.path.join(self.pathname,"user_config_template.json"))133 os.chdir(currdir)134 self.tearDown(os.path.join("../user_profiles",self.pathname))135 def test_ncap_iac_paths_configure(self,pytestconfig):136 """137 Test the basic functionality of configure.sh given a valid file name and run from the ncap_iac directory. 
138 """139 os.chdir(os.path.join(pytestconfig.rootdir,"ncap_iac/tests/"))140 ## Create this folder 141 self.pathname = "autogen-test-users-iac"142 ## Try with ncap_iac143 currdir = os.getcwd()144 os.chdir("../")145 subprocess.call(["bash","user_profiles/iac_utils/configure.sh",self.pathname])146 assert os.path.exists(os.path.join("user_profiles/",self.pathname))147 assert os.path.exists(os.path.join("user_profiles/",self.pathname,"user_config_template.json"))148 os.chdir(currdir)149 self.tearDown(os.path.join("../user_profiles",self.pathname))150 def test_ncap_pipeline_configure(self,pytestconfig):151 """152 Test the basic functionality of configure.sh given a valid file name and run from another pipeline directory. 153 """154 os.chdir(os.path.join(pytestconfig.rootdir,"ncap_iac/tests/"))155 ## Create this folder 156 self.pathname = "autogen-test-users-pipeline"157 ## Make one:158 currdir = os.getcwd()159 os.chdir("../")160 subprocess.call(["bash","user_profiles/iac_utils/configure.sh",self.pathname])161 assert os.path.exists(os.path.join("user_profiles/",self.pathname))162 assert os.path.exists(os.path.join("user_profiles/",self.pathname,"user_config_template.json"))163 os.chdir(os.path.join("user_profiles/",self.pathname))164 165 ## Try from inside another pipeline folder. 166 self.pathname2 = "autogen-2-test-users-pipeline"167 subprocess.call(["bash","../iac_utils/configure.sh",self.pathname2])168 assert os.path.exists(os.path.join("../",self.pathname2))169 assert os.path.exists(os.path.join("../",self.pathname2,"user_config_template.json"))170 os.chdir(currdir)171 self.tearDown(os.path.join("../user_profiles",self.pathname))172 self.tearDown(os.path.join("../user_profiles",self.pathname2))173 def test_name_underscores(self,pytestconfig,capfd):174 """175 Test that we're correctly catching incorrectly configured paths (no underscores. )176 """177 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))178 self.pathname = "autogen_test_users_iac"179 ## Try with ncap_iac180 currdir = os.getcwd()181 os.chdir("../")182 subprocess.call(["bash","user_profiles/iac_utils/configure.sh",self.pathname])183 captured = capfd.readouterr()184 assert captured.err.split("\n")[3] == "AssertionError: Names must be alphanumeric"185 os.chdir(currdir)186 187 def test_name_uppercase(self,pytestconfig,capfd):188 """189 Test that we're correctly catching incorrectly configured paths (no underscores. )190 """191 os.chdir(os.path.join(pytestconfig.rootdir,"tests/"))192 self.pathname = "Autogen-test-users-iac"193 ## Try with ncap_iac194 currdir = os.getcwd()195 os.chdir("../")196 subprocess.call(["bash","user_profiles/iac_utils/configure.sh",self.pathname])197 captured = capfd.readouterr()198 assert captured.err.split("\n")[3] == "AssertionError: Names must be alphanumeric"199 os.chdir(currdir)200class Test_Build_Blueprint():201 """Class to test the process of going from custom blueprint -> aws cloudformation json file 202 """203 def test_init(self,pytestconfig):204 os.chdir(os.path.join(pytestconfig.rootdir,"ncap_iac/utils"))205 z = subprocess.call(["python","dev_builder.py","../../tests/unit_tests/fixture_dir/fixture_stack/stack_config_template.json","websubstack"])...
conftest.py
Source: conftest.py
import json
import shutil
import pathlib
import typing as tp
from dataclasses import dataclass
import allure
import pytest
import solana
from _pytest.config import Config
from utils.operator import Operator
from utils.faucet import Faucet
from utils.web3client import NeonWeb3Client
LAMPORT_PER_SOL = 1_000_000_000
@dataclass
class EnvironmentConfig:
    # name: str
    proxy_url: str
    solana_url: str
    faucet_url: str
    network_id: int
    operator_neon_rewards_address: tp.List[str]
    spl_neon_mint: str
    operator_keys: tp.List[str]
def pytest_addoption(parser):
    parser.addoption("--network", action="store", default="night-stand", help="Which stand use")
    parser.addoption("--envs", action="store", default="envs.json", help="Filename with environments")
def pytest_configure(config: Config):
    network_name = config.getoption("--network")
    envs_file = config.getoption("--envs")
    with open(pathlib.Path().parent.parent / envs_file, "r+") as f:
        environments = json.load(f)
    assert network_name in environments, f"Environment {network_name} doesn't exist in envs.json"
    config.environment = EnvironmentConfig(**environments[network_name])
@pytest.fixture(scope="session", autouse=True)
def faucet(pytestconfig: Config) -> Faucet:
    return Faucet(pytestconfig.environment.faucet_url)
@pytest.fixture(scope="session", autouse=True)
def web3_client(pytestconfig: Config) -> NeonWeb3Client:
    client = NeonWeb3Client(pytestconfig.environment.proxy_url, pytestconfig.environment.network_id)
    return client
@pytest.fixture(scope="session", autouse=True)
def sol_client(pytestconfig: Config):
    client = solana.rpc.api.Client(pytestconfig.environment.solana_url)
    return client
@pytest.fixture(scope="session", autouse=True)
def operator(pytestconfig: Config, web3_client: NeonWeb3Client) -> Operator:
    return Operator(
        pytestconfig.environment.proxy_url,
        pytestconfig.environment.solana_url,
        pytestconfig.environment.network_id,
        pytestconfig.environment.operator_neon_rewards_address,
        pytestconfig.environment.spl_neon_mint,
        pytestconfig.environment.operator_keys,
        web3_client=web3_client
    )
@pytest.fixture(scope="session", autouse=True)
def allure_environment(pytestconfig: Config, web3_client: NeonWeb3Client):
    opts = {
        "Proxy.Version": web3_client.get_proxy_version()["result"],
        "EVM.Version": web3_client.get_evm_version()["result"],
        "CLI.Version": web3_client.get_cli_version()["result"]
    }
    allure_path = pytestconfig.getoption("--alluredir")
    yield opts
    with open(pathlib.Path() / allure_path / "environment.properties", "w+") as f:
        f.write("\n".join(map(lambda x: f"{x[0]}={x[1]}", opts.items())))
        f.write("\n")
    categories_from = pathlib.Path() / "allure" / "categories.json"
    categories_to = pathlib.Path() / allure_path / "categories.json"
    shutil.copy(categories_from, categories_to)
@pytest.fixture(scope="class")
def prepare_account(operator, faucet, web3_client):
    """Create new account for tests and save operator pre/post balances"""
    with allure.step("Create account for tests"):
        acc = web3_client.eth.account.create()
    with allure.step(f"Request 1000 NEON from faucet for {acc.address}"):
        faucet.request_neon(acc.address, 1000)
        assert web3_client.get_balance(acc) == 1000
    start_neon_balance = operator.get_neon_balance()
    start_sol_balance = operator.get_solana_balance()
    with allure.step(f"Operator initial balance: {start_neon_balance / LAMPORT_PER_SOL} NEON {start_sol_balance / LAMPORT_PER_SOL} SOL"):
        pass
    yield acc
    end_neon_balance = operator.get_neon_balance()
    end_sol_balance = operator.get_solana_balance()
    with allure.step(f"Operator end balance: {end_neon_balance / LAMPORT_PER_SOL} NEON {end_sol_balance / LAMPORT_PER_SOL} SOL"):
        pass
    with allure.step(
            f"Account end balance: {web3_client.get_balance(acc)} NEON"):
        ...
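Because the session fixtures above are autouse, a test module only needs to request prepare_account to get an account pre-funded with 1000 NEON, while the stand is selected on the command line via --network. A minimal, hypothetical consumer of this conftest.py (TestTransfer is not part of the original suite) could look like this:

# Hypothetical test module; relies only on fixtures defined in the conftest.py above.
class TestTransfer:
    def test_account_is_funded(self, prepare_account, web3_client):
        # prepare_account yields an account the faucet fixture funded with 1000 NEON.
        assert web3_client.get_balance(prepare_account) == 1000

It would be run as, for example, pytest --network night-stand --envs envs.json, matching the options registered in pytest_addoption.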
test_2b_adfv2pipeline_adls.py
Source: test_2b_adfv2pipeline_adls.py
import pytest
import os
import requests
import time
import sys
import pyarrow.parquet as pq
from azure.core.credentials import AccessToken
from azure.storage.blob import BlobServiceClient
from datetime import datetime
# prerequisites
# 0. Create resource group, ADFv2 instance, storage account, storage container
# 1. Download csv file of 1GB and add file to storage account
# 2. Create snapshot of file
# 3. Add 1 record to csv file and upload file again to storage account
# 4. Create new snapshot of file
# 5. Run this script
#
class CustomTokenCredential(object):
    def __init__(self, name):
        self.name = name
    def get_token(self, *scopes, **kwargs):
        access_token = self.name
        expires_on = 1000
        return AccessToken(access_token, expires_on)
@pytest.fixture()
def name(pytestconfig):
    return pytestconfig.getoption("token")
def test_adfv2_dataflows_adlsgen2_delete_piicolumns(pytestconfig):
    # https://docs.microsoft.com/en-us/samples/azure-samples/data-lake-analytics-python-auth-options/authenticating-your-python-application-against-azure-active-directory/
    # access_token = credentials.token["access_token"]
    adfv2name = pytestconfig.getoption('adfv2name')
    adlsgen2stor = pytestconfig.getoption('adlsgen2stor')
    accesskeyadls = pytestconfig.getoption('accesskeyadls')
    subscriptionid = pytestconfig.getoption('subscriptionid')
    rg = pytestconfig.getoption('rg')
    #
    # Since Azure DevOps SPN created ADFv2 instance, Azure DevOps SPN has owner rights and can execute pipeline using REST (Contributor is minimally required)
    tokenadf = pytestconfig.getoption('tokenadf')
    adfv2namepipeline = "adlsgen2-dataflows-delete-piicolumns"
    url = "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataFactory/factories/{}/pipelines/{}/createRun?api-version=2018-06-01".format(subscriptionid, rg, adfv2name, adfv2namepipeline)
    response = requests.post(url,
        headers={'Authorization': "Bearer " + tokenadf},
        json={
            "outputfolder": "curated"
        }
    )
    #
    assert response.status_code == 200, "test failed, pipeline not started, " + str(response.content)
    #
    runid = response.json()['runId']
    #
    count = 0
    while True:
        response = requests.get(
            "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataFactory/factories/{}/pipelineruns/{}?api-version=2018-06-01".format(subscriptionid, rg, adfv2name, runid),
            headers={'Authorization': "Bearer " + tokenadf}
        )
        status = response.json()['status']
        if status == "InProgress" or status == "Queued":
            count += 1
            if count < 30:
                time.sleep(30)  # wait 30 seconds before next status update
            else:
                # timeout
                break
        else:
            # pipeline has end state, script has finished
            print("hier2")
            break
    #
    assert count < 30, "test failed, time out"
    #credential = CustomTokenCredential(tokenadls)
    credential = accesskeyadls
    storage_account_source_url = "https://" + adlsgen2stor + ".blob.core.windows.net"
    #
    client_source = BlobServiceClient(account_url=storage_account_source_url, credential=credential)
    container_source = client_source.get_container_client("curated")
    #
    blob_list = container_source.list_blobs(include=['snapshots'])
    for blob in blob_list:
        bottled_file = blob.name
        assert bottled_file == "AdultCensusIncomePIIremoved.parquet", "parquet file not found"
    #
    blob_client = client_source.get_blob_client(container="curated", blob="AdultCensusIncomePIIremoved.parquet")
    with open("AdultCensusIncomePIIremoved.parquet", "wb") as my_blob:
        download_stream = blob_client.download_blob()
        my_blob.write(download_stream.readall())
    #
    parquet_file = pq.ParquetFile('AdultCensusIncomePIIremoved.parquet')
    i = 0
    while i < parquet_file.metadata.row_group(0).num_columns:
        print(parquet_file.metadata.row_group(0).column(i).path_in_schema)
        if parquet_file.metadata.row_group(0).column(i).path_in_schema == "age":
            break
        i += 1
    # ...
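Both ADF tests on this page poll the pipelineruns endpoint with the same 30 x 30-second loop. If that were factored out, a shared helper along these lines could be used; this is a sketch under the same REST API and token assumptions as the tests above, with hypothetical names:

import time

import requests

ADF_RUN_URL = ("https://management.azure.com/subscriptions/{}/resourceGroups/{}"
               "/providers/Microsoft.DataFactory/factories/{}/pipelineruns/{}"
               "?api-version=2018-06-01")


def wait_for_pipeline_run(subscriptionid, rg, adfv2name, runid, tokenadf,
                          max_polls=30, interval=30):
    """Poll an ADFv2 pipeline run until it leaves the Queued/InProgress states.

    Returns the final status string, or raises TimeoutError after roughly
    max_polls * interval seconds.
    """
    for _ in range(max_polls):
        response = requests.get(
            ADF_RUN_URL.format(subscriptionid, rg, adfv2name, runid),
            headers={"Authorization": "Bearer " + tokenadf},
        )
        status = response.json()["status"]
        if status not in ("InProgress", "Queued"):
            return status  # e.g. "Succeeded" or "Failed"
        time.sleep(interval)
    raise TimeoutError("pipeline run {} did not finish in time".format(runid))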
test_2a_adfv2pipeline_sqldb.py
Source: test_2a_adfv2pipeline_sqldb.py
import pytest
import os
import requests
import adal
from msrestazure.azure_active_directory import AADTokenCredentials
import time
import sys
import pyodbc
import struct
# prerequisites
# 0. Create resource group, ADFv2 instance, storage account, storage container
# 1. Download csv file of 1GB and add file to storage account
# 2. Create snapshot of file
# 3. Add 1 record to csv file and upload file again to storage account
# 4. Create new snapshot of file
# 5. Run this script
#
@pytest.fixture()
def name(pytestconfig):
    return pytestconfig.getoption("token")
def test_run_pipeline(pytestconfig):
    # https://docs.microsoft.com/en-us/samples/azure-samples/data-lake-analytics-python-auth-options/authenticating-your-python-application-against-azure-active-directory/
    # access_token = credentials.token["access_token"]
    tokendb = pytestconfig.getoption('tokendb')
    adfv2name = pytestconfig.getoption('adfv2name')
    sqlserver = pytestconfig.getoption('sqlserver') + '.database.windows.net'
    sqldatabase = pytestconfig.getoption('sqldatabase')
    sqllogin = pytestconfig.getoption('sqllogin')
    sqlpassword = pytestconfig.getoption('sqlpassword')
    azuredevopsspndbadmin = pytestconfig.getoption('azuredevopsspndbadmin')
    subscriptionid = pytestconfig.getoption('subscriptionid')
    rg = pytestconfig.getoption('rg')
    #
    # Since Azure DevOps SPN created ADFv2 instance, Azure DevOps SPN has owner rights and can execute pipeline using REST (Contributor is minimally required)
    tokenadf = pytestconfig.getoption('tokenadf')
    adfv2namepipeline = "sqldb-dataflows-remove-nullvalues"
    url = "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataFactory/factories/{}/pipelines/{}/createRun?api-version=2018-06-01".format(subscriptionid, rg, adfv2name, adfv2namepipeline)
    response = requests.post(url,
        headers={'Authorization': "Bearer " + tokenadf},
        json={}
    )
    #
    assert response.status_code == 200, "test failed, pipeline not started, " + str(response.content)
    #
    runid = response.json()['runId']
    #
    count = 0
    while True:
        response = requests.get(
            "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataFactory/factories/{}/pipelineruns/{}?api-version=2018-06-01".format(subscriptionid, rg, adfv2name, runid),
            headers={'Authorization': "Bearer " + tokenadf}
        )
        status = response.json()['status']
        if status == "InProgress" or status == "Queued":
            count += 1
            if count < 30:
                time.sleep(30)  # wait 30 seconds before next status update
            else:
                # timeout
                break
        else:
            # pipeline has end state, script has finished
            print("hier2")
            break
    #
    assert count < 30, "test failed, time out"
    #
    if azuredevopsspndbadmin == 1:
        # Azure DevOps is SQL Azure AD admin and ADFv2 MI shall be added to database as user
        accessToken = bytes(tokendb, 'utf-8')
        exptoken = b""
        for i in accessToken:
            exptoken += bytes({i})
            exptoken += bytes(1)
        tokenstruct = struct.pack("=i", len(exptoken)) + exptoken
        connstr = 'DRIVER={ODBC Driver 17 for SQL Server};SERVER='+sqlserver+';DATABASE='+sqldatabase
        conn = pyodbc.connect(connstr, attrs_before = { 1256:tokenstruct })
        cursor = conn.cursor()
        #
        create_user = "CREATE USER [" + adfv2name + "] FROM EXTERNAL PROVIDER;"
        cursor.execute(create_user)
        add_role = "EXEC sp_addrolemember [db_owner], [" + adfv2name + "];"
        cursor.execute(add_role)
    else:
        # ADFv2 MI is Azure AD admin, SQL local user shall be used to query results database from Azure DevOps
        connstr = 'DRIVER={ODBC Driver 17 for SQL Server};SERVER='+sqlserver+';UID='+sqllogin+';PWD='+sqlpassword+';DATABASE='+sqldatabase
        conn = pyodbc.connect(connstr)
        cursor = conn.cursor()
    cursor.execute("SELECT count(*) FROM Sales.OrdersAggregated WHERE Comments != 'test123'")
    row = cursor.fetchall()
    value = [record[0] for record in row]
    # ...
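Both ADF tests read their settings through calls like pytestconfig.getoption('adfv2name') and pytestconfig.getoption('tokenadf'), which only works if a conftest.py registers those flags with pytest_addoption; that conftest is not shown on this page. A minimal sketch of what such a registration could look like, with option names taken from the tests and placeholder defaults:

# conftest.py (sketch): register the command-line options the ADF tests read.
def pytest_addoption(parser):
    for opt in (
        "token", "tokenadf", "tokendb", "adfv2name", "adlsgen2stor",
        "accesskeyadls", "subscriptionid", "rg", "sqlserver", "sqldatabase",
        "sqllogin", "sqlpassword", "azuredevopsspndbadmin",
    ):
        parser.addoption("--" + opt, action="store", default="", help="value for " + opt)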
Looking for an in-depth tutorial around pytest? LambdaTest covers a detailed pytest tutorial with everything related to pytest, from setting up the framework to automation testing. Delve deeper into pytest by exploring advanced use cases like parallel testing, pytest fixtures, parameterization, executing multiple test cases from a single file, and more.
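As a small taste of the parameterization mentioned above, here is a self-contained example (not tied to any of the snippets on this page):

import pytest


@pytest.mark.parametrize("value, expected", [(2, 4), (3, 9), (4, 16)])
def test_square(value, expected):
    # pytest generates one test case per (value, expected) pair.
    assert value ** 2 == expected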
Skim the pytest tutorial playlist below to get started with automation testing using the pytest framework.
https://www.youtube.com/playlist?list=PLZMWkkQEwOPlcGgDmHl8KkXKeLF83XlrP