Best Python code snippet using behave
Source: test_aggregate.py
...
        aggregator_task = tasks.Aggregator(self.config)
        url = 'https://raw.githubusercontent.com/frictionlessdata/goodtables-py/v1.0.0-alpha8/data/valid.csv'
        pipeline_instance = pipeline.Pipeline(data=url, format='csv',
                                              post_task=aggregator_task.run)
        results_before_run = self.read_file_contents(aggregator_task.result_file)
        pipeline_instance.run()
        results_after_run = self.read_file_contents(aggregator_task.result_file)
        self.assertEqual(len(results_after_run), len(results_before_run) + 1)

    def test_agregator_batch_run(self):
        """Test that Aggregator task updates run file after each batch"""
        config = self.config
        aggregator_task = tasks.Aggregator(config)

        def mokup_function(instance):
            aggregator_task.write_run()

        batch_options = config['goodtables']['arguments']['batch']
        batch_options['post_task'] = mokup_function
        batch_options['pipeline_options'] = config['goodtables']['arguments']['pipeline']
        batch = pipeline.Batch(aggregator_task.source_file, **batch_options)
        runs_before_run = self.read_file_contents(aggregator_task.run_file)
        batch.run()
        runs_after_run = self.read_file_contents(aggregator_task.run_file)
        self.assertGreater(len(runs_after_run), len(runs_before_run))

    def test_aggregator_fetch(self):
        """Test that Aggregator task fetches the source"""
        aggregator_task = tasks.Aggregator(self.config)
        url = 'https://raw.githubusercontent.com/frictionlessdata/goodtables-py/v1.0.0-alpha8/data/valid.csv'
        utilities.set_up_cache_dir(aggregator_task.cache_dir)
        pipeline_instance = pipeline.Pipeline(data=url, format='csv',
                                              post_task=aggregator_task.run)
        pipeline_instance.run()
        file_names = []
        for file_name in os.listdir(aggregator_task.cache_dir):
            file_names.append(file_name)
        self.assertEquals(file_names, ['valid.csv'])

    def test_aggregator_assess_timeliness(self):
        """Test that Aggregator calls the RelevancePeriodExtractor"""
        self.config['source_file'] = 'sources_with_period_id.csv'
        self.config['datapackage_file'] = 'datapackage_sources_with_period.json'
        self.config['assess_timeliness'] = True
        self.config['timeliness']['timeliness_strategy'] = ['period_id']
        extractor = tasks.extract_relevance_period.RelevancePeriodExtractor(self.config)
        extractor.run()
        aggregator_task = tasks.Aggregator(self.config)
        url = 'https://raw.githubusercontent.com/frictionlessdata/goodtables-py/v1.0.0-alpha8/data/valid.csv'
        pipeline_instance = pipeline.Pipeline(data=url, format='csv',
                                              post_task=aggregator_task.run)
        pipeline_instance.run()
        updated_sources = self.read_file_contents(aggregator_task.result_file)
        result = updated_sources[-1]
        score = int(result['score'])
        self.assertEqual(98, score)

    def tests_aggreate_scoring(self):
        """Test Aggregator scoring"""
        aggregator_task = tasks.Aggregator(self.config)
        url = 'https://raw.githubusercontent.com/frictionlessdata/goodtables-py/v1.0.0-alpha8/data/empty_rows_multiple.csv'
        schema = 'https://raw.githubusercontent.com/frictionlessdata/goodtables-py/v1.0.0-alpha8/data/test_schema.json'
        pipeline_options = self.config['goodtables']['arguments']['pipeline']
        pipeline_options['options']['schema']['schema'] = schema
        pipeline_instance = pipeline.Pipeline(data=url, format='csv',
                                              post_task=aggregator_task.run,
                                              **pipeline_options)
        pipeline_instance.run()
        result = self.read_file_contents(aggregator_task.result_file)[-1]
        self.assertEqual(int(result['score']), 0)

    def read_file_contents(self, file_name):
        """Return file contents as list of dicts"""
        contents = []
        with compat.UnicodeDictReader(file_name) as src_file:
            for line in src_file:
                contents.append(line)
...
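The read_file_contents helper above leans on compat.UnicodeDictReader from the surrounding project. Outside that codebase, a rough Python 3 stand-in built on the standard library's csv.DictReader might look like the sketch below; the results.csv path in the example call is a placeholder, not something the original tests define.

import csv

def read_file_contents(file_name):
    """Return the rows of a CSV results file as a list of dicts.

    Rough stand-in for compat.UnicodeDictReader on Python 3; the
    example call below uses a hypothetical file name.
    """
    with open(file_name, newline='', encoding='utf-8') as src_file:
        return list(csv.DictReader(src_file))

# Example (hypothetical path):
# rows = read_file_contents('results.csv')
# print(rows[-1]['score'])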
Source: aYo_database.py
#from csv import reader
"""
    Manages the aYo Database
    To use:
        this must be in your header -> from aYo_database import Database
    Functions to be used outside of this file:
        Check_Login("username here", "password here")
            this will return true if login is ok
        CreateUserProfile("username here", "password here")
            will return false if the username is taken
            will return true if the username is not taken and the database was updated
"""
import os
from .aYo_decode import Decode
from .aYo_encode import Encode

decoder = Decode()
encoder = Encode()
current_path = os.getcwd()
un_file_path = os.path.abspath(os.path.join(current_path, "database", "aYo_un_database.csv"))
pw_file_path = os.path.abspath(os.path.join(current_path, "database", "aYo_pw_database.csv"))


def Read_File_Contents(file_name):
    # r tells python to open in read-only mode
    file_object_input = open(file_name, "r")
    # -> place all of the file in file_contents
    file_contents = file_object_input.read()
    file_object_input.close()
    return file_contents
# end readFileContents


def Get_List(text):
    the_list = []
    lines = text.split('\n')
    # removes all empty lines (especially the last one ...)
    lines = [line for line in lines if line.strip()]
    for i, line in enumerate(lines):
        the_list.append(line)
    return the_list


class Database:
    from .aYo_decode import Decode
    from .aYo_encode import Encode
    decoder = Decode()
    encoder = Encode()

    def Check_If_Username_In_Use(self, name):
        user_names = decoder.decode(Read_File_Contents(un_file_path))
        if user_names.find("" + name) > -1:
            return True
        else:
            return False

    def Check_Login(name, password):
        user_names = decoder.decode(Read_File_Contents(un_file_path))
        the_passwords = decoder.decode(Read_File_Contents(pw_file_path))

        username_list = Get_List(user_names)
        password_list = Get_List(the_passwords)

        check_user = user_names.find(name)

        if check_user > -1:
            if password_list[(username_list.index(name))] == password:
                return True
        return False

    def Create_User_Profile(name, password):
        user_names = decoder.decode(Read_File_Contents(un_file_path))
        check = False
        if user_names.find("" + name) == -1:
            check = True
        if check == True:
            file_object_output = open(un_file_path, "a")
            file_object_output.write(encoder.encode(name) + "\n")
            file_object_output.close()
            file_object_output = open(pw_file_path, "a")
            file_object_output.write(encoder.encode(password) + "\n")
            file_object_output.close()
...
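Going by the module docstring, the class is meant to be driven roughly as below. Check_Login and Create_User_Profile are defined without self, so this sketch calls them through the class (which works on Python 3); the username and password values are placeholders, and the docstring's claim that Create_User_Profile returns True/False is taken at face value since the listing is truncated.

from aYo_database import Database

# Placeholder credentials; per the docstring these calls return True on success.
if Database.Create_User_Profile("alice", "hunter2"):
    print("profile created")
if Database.Check_Login("alice", "hunter2"):
    print("login ok")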
Source: get_wavelength_eV_and_osc.py
#!/usr/bin/python
read_file_name = ENTER YOUR .out FILENAME HERE
read_file = open(read_file_name, 'r')
read_file_contents = read_file.readlines()
write_file_name = 'wv_ev_osc.txt'
write_file = open(write_file_name, 'a')

write_file.write('list_of_wavelengths = [')
skip_first_counter = 0
es_index = 0
for line in read_file_contents:
    if line[:8] == ' Excited':
        if skip_first_counter == 0:
            skip_first_counter += 1
            es_index += 1
        elif es_index == 119:
            new_wavelength = "'" + (line[50:60] + "'")
            write_file.write(new_wavelength)
        else:
            new_wavelength = "'" + (line[50:60] + "',")
            write_file.write(new_wavelength)
            es_index += 1
write_file.write(']\n\n\n')

write_file.write('list_of_eV = [')
skip_first_counter = 0
es_index = 0
for line in read_file_contents:
    if line[:8] == ' Excited':
        if skip_first_counter == 0:
            skip_first_counter += 1
            es_index += 1
        elif es_index == 119:
            new_osc = "'" + (line[62:70] + "'")
            write_file.write(new_osc)
        else:
            new_osc = "'" + (line[62:70] + "',")
            write_file.write(new_osc)
            es_index += 1
write_file.write(']\n\n\n')

write_file.write('list_of_osc = [')
skip_first_counter = 0
es_index = 0
for line in read_file_contents:
    if line[:8] == ' Excited':
        if skip_first_counter == 0:
            skip_first_counter += 1
            es_index += 1
        elif es_index == 119:
            new_eV = "'" + (line[40:49] + "'")
            write_file.write(new_eV)
        else:
            new_eV = "'" + (line[40:49] + "',")
            write_file.write(new_eV)
            es_index += 1
...
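The script scans read_file_contents three times, once per quantity, and writes each value as a quoted string fragment. A single-pass sketch that first collects the same fixed-width slices into lists is shown below. Note the slice-to-quantity mapping follows the variable names in the original loops (line[40:49] as eV, line[62:70] as oscillator strength), which is the reverse of the list headings they end up under, so verify it against your .out layout; example.out is a placeholder filename.

# Single-pass sketch: gather the three column slices the original script uses,
# then write the lists out once. 'example.out' is a placeholder.
wavelengths, evs, oscs = [], [], []
with open('example.out') as read_file:
    for line in read_file:
        if line.startswith(' Excited'):
            wavelengths.append(line[50:60].strip())
            evs.append(line[40:49].strip())   # slice-to-quantity mapping assumed; check your format
            oscs.append(line[62:70].strip())

with open('wv_ev_osc.txt', 'a') as write_file:
    write_file.write('list_of_wavelengths = %r\n\n\n' % wavelengths)
    write_file.write('list_of_eV = %r\n\n\n' % evs)
    write_file.write('list_of_osc = %r\n\n\n' % oscs)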
Source: setup.py
'''Vicon Core API PyPI setup.'''
import os
from setuptools import setup

_THIS_DIR = os.path.abspath(os.path.dirname(__file__))


def read_file_contents(file_path):
    '''Reads the contents of a file returning it as a string.'''
    try:
        with open(file_path) as file_:
            return file_.read()
    except:  # pylint: disable=W0702
        print "Failed to read [" + file_path + "]"
        return ''


README = read_file_contents(os.path.join(_THIS_DIR, 'README.rst'))
CHANGES = read_file_contents(os.path.join(_THIS_DIR, 'CHANGES.rst'))
VERSION = read_file_contents(os.path.join(_THIS_DIR, 'VERSION')).strip()

setup(name='vicon_core_api',
      version=VERSION,
      packages=['vicon_core_api'],
      install_requires=['enum34'],
      description='Core API components for remote monitoring and control of Vicon applications.',
      long_description=README + '\n\n' + CHANGES,
      long_description_content_type='text/x-rst',
      author='Vicon Motion Systems Ltd',
      author_email='support@vicon.com',
      url='https://vicon.com/support',
      classifiers=(
          "Programming Language :: Python :: 2",
          "License :: OSI Approved :: MIT License",
          "Operating System :: OS Independent"),
...
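The print statement and the Python 2 classifier pin this setup.py to Python 2. Purely as a sketch, a Python 3-compatible version of the same helper, assuming a UTF-8 read and a narrower exception are acceptable, could look like this:

def read_file_contents(file_path):
    '''Reads the contents of a file, returning it as a string (Python 3 sketch).'''
    try:
        with open(file_path, encoding='utf-8') as file_:
            return file_.read()
    except OSError as error:  # narrower than the original bare except
        print("Failed to read [{}]: {}".format(file_path, error))
        return ''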