Best Python code snippet using autotest_python
schedule_jobs.py
Source:schedule_jobs.py
...
    clear_jobs(client=client, project_id=project_id, location_id=location_id)

    # Read the list of all known locations, since we will be splitting some jobs based on that
    location_keys = list(table_read_column(SRC / "data" / "metadata.csv", "key"))

    # Cache pull job runs hourly
    _schedule_job(schedule="0 * * * *", path="/deferred/cache_pull")

    # Get new errors once a day at midday
    _schedule_job(path="/deferred/report_errors_to_github", schedule="0 12 * * *")

    # Keep track of the different job groups to only output them once
    job_urls_seen = set()
    for data_pipeline in get_pipelines():
        # The job that combines data sources into a table runs hourly
        _schedule_job(
            path=f"/deferred/combine_table?table={data_pipeline.table}",
            # Offset by 15 minutes to let other hourly tasks finish
            schedule="15 * * * *",
        )

        for idx, data_source in enumerate(data_pipeline.data_sources):
            automation_opts = data_source.config.get("automation", {})

            # The job to pull each individual data source runs hourly unless specified otherwise
            job_sched = automation_opts.get("schedule", "0 * * * *")

            # If the job is deferred, then prepend the token to the path
            job_prefix = "/deferred" if automation_opts.get("deferred") else ""

            # Each data source has a job group. All data sources within the same job group are
            # run as part of the same job in series. The default job group is the index of the
            # data source.
            job_group = automation_opts.get("job_group", idx)

            job_url = f"{job_prefix}/update_table?table={data_pipeline.table}&job_group={job_group}"
            if job_url not in job_urls_seen:
                job_urls_seen.add(job_url)
                _schedule_job(path=job_url, schedule=job_sched)

    ########
    # V2 publish jobs
    ########

    # The job that publishes combined tables into the prod bucket runs every 2 hours
    _schedule_job(
        # Run in a separate, preemptible instance
        path="/deferred/publish_tables",
        # Offset by 30 minutes to let other hourly tasks finish
        schedule="30 */2 * * *",
    )

    # The job that publishes aggregate outputs runs every 4 hours
    _schedule_job(
        # Run in a separate, preemptible instance
        path="/deferred/publish_main_table",
        # Offset by 60 minutes to let other hourly tasks finish
        schedule="0 1-23/4 * * *",
    )

    # The job that publishes breakdown outputs runs every 4 hours
    _schedule_job(
        path="/deferred/publish_subset_tables",
        # Offset by 90 minutes to run after publishing
        schedule="30 1-23/4 * * *",
    )

    # Converting the outputs to JSON is less critical but also slow, so it's run separately
    _schedule_job(
        path="/deferred/publish_json_tables?prod_folder=v2",
        # Offset by 120 minutes to run after subset tables are published
        schedule="0 2-23/4 * * *",
    )

    for subset in _split_into_subsets(location_keys, bin_count=5):
        job_params = f"prod_folder=v2&location_key_from={subset[0]}&location_key_until={subset[-1]}"
        _schedule_job(
            path=f"/deferred/publish_json_locations?{job_params}",
            # Offset by 120 minutes to run after subset tables are published
            schedule="0 2-23/4 * * *",
        )

    ########
    # V3 publish jobs
    ########

    # Publish the global tables (with all location keys) every 2 hours
    _schedule_job(
        path="/deferred/publish_v3_global_tables",
        # Offset by 30 minutes to let other hourly tasks finish
        schedule="30 */2 * * *",
    )

    # Convert the global tables to JSON
    _schedule_job(
        path="/deferred/publish_json_tables?prod_folder=v3",
        # Offset by 60 minutes to execute after publish_v3_global_tables finishes
        schedule="0 1-23/2 * * *",
    )

    # Break down the outputs by location key every 2 hours, and execute the job in chunks
    for subset in _split_into_subsets(location_keys, bin_count=5):
        job_params = f"location_key_from={subset[0]}&location_key_until={subset[-1]}"
        _schedule_job(
            path=f"/deferred/publish_v3_location_subsets?{job_params}",
            # Offset by 60 minutes to execute after publish_v3_global_tables finishes
            schedule="0 1-23/2 * * *",
        )

    # Publish the main aggregated table every 2 hours
    _schedule_job(
        path="/deferred/publish_v3_main_table",
        # Offset by 90 minutes to execute after publish_v3_location_subsets finishes
        schedule="30 1-23/2 * * *",
    )

    # Publish outputs in JSON format every 2 hours, and execute the job in chunks
    for subset in _split_into_subsets(location_keys, bin_count=5):
        job_params = f"prod_folder=v3&location_key_from={subset[0]}&location_key_until={subset[-1]}"
        _schedule_job(
            path=f"/deferred/publish_json_locations?{job_params}",
            # Offset by 90 minutes to execute after publish_v3_location_subsets finishes
            schedule="30 1-23/2 * * *",
        )


if __name__ == "__main__":
    # Get default values from the environment
    default_project = os.environ.get("GCP_PROJECT")
    default_location = os.environ.get("GCP_LOCATION", GCP_LOCATION)
    default_time_zone = os.environ.get("GCP_TIME_ZONE", "America/New_York")

    # Parse arguments from the command line
    argparser = ArgumentParser()
    argparser.add_argument("--project-id", type=str, default=default_project)
    argparser.add_argument("--location-id", type=str, default=default_location)
    argparser.add_argument("--time-zone", type=str, default=default_time_zone)
    ...
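The snippet leans on two helpers whose definitions are truncated above. Below is a minimal sketch of what they might look like, assuming _schedule_job wraps the google-cloud-scheduler client with an App Engine HTTP target and _split_into_subsets yields contiguous bins of sorted keys; the signatures and default values here are assumptions, not the project's actual code.

import math
from google.cloud import scheduler_v1


def _schedule_job(path: str, schedule: str, project_id: str = "my-project",
                  location_id: str = "us-east1", time_zone: str = "America/New_York") -> None:
    # Hypothetical reconstruction: register a Cloud Scheduler job that hits
    # the given App Engine path on the given cron schedule.
    client = scheduler_v1.CloudSchedulerClient()
    parent = client.common_location_path(project_id, location_id)
    job = scheduler_v1.Job(
        app_engine_http_target=scheduler_v1.AppEngineHttpTarget(relative_uri=path),
        schedule=schedule,  # standard cron syntax, e.g. "0 * * * *"
        time_zone=time_zone,
    )
    client.create_job(parent=parent, job=job)


def _split_into_subsets(keys, bin_count):
    # Split the sorted keys into `bin_count` contiguous bins so that callers can
    # use subset[0] and subset[-1] as location_key_from/location_key_until ranges.
    keys = sorted(keys)
    bin_size = max(1, math.ceil(len(keys) / bin_count))
    for idx in range(0, len(keys), bin_size):
        yield keys[idx:idx + bin_size]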
test_scheduled_jobs.py
Source:test_scheduled_jobs.py
...
        self.connection.flushall()
        self.scheduler = Scheduler('test_queue', connection=self.connection)

    def test_adds_scheduled_job_with_interval(self):
        interval = 7
        _schedule_job(a_function, interval, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()
        assert len(sched_jobs) == 1, sched_jobs
        assert sched_jobs[0].meta['interval'] == interval, sched_jobs[0].meta

    def test_adds_several_jobs(self):
        _schedule_job(a_function, 1, self.scheduler)
        _schedule_job(another_function, 1, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()
        job_func_names = [job.func_name for job in sched_jobs]
        assert len(sched_jobs) == 2, sched_jobs
        assert 'test_scheduled_jobs.a_function' in job_func_names
        assert 'test_scheduled_jobs.another_function' in job_func_names

    def test_does_not_add_job_if_already_added(self):
        _schedule_job(a_function, 1, self.scheduler)
        _schedule_job(a_function, 1, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()
        assert len(sched_jobs) == 1, sched_jobs

    def test_returns_log_messages(self):
        success_message = _schedule_job(a_function, 1, self.scheduler)
        failure_message = _schedule_job(a_function, 1, self.scheduler)
        assert success_message == 'Scheduled a_function to run every 1 seconds'
        assert failure_message == 'Job a_function is already scheduled'

    def test_failed_attempt_to_schedule_does_not_pollute_redis(self):
        _schedule_job(a_function, 1, self.scheduler)
        _schedule_job(a_function, 1, self.scheduler)
        stored_values = self.connection.keys('rq:job*')
...
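The helper under test is not shown, but the assertions pin down its contract: schedule a function at a fixed interval, skip duplicates, and return a log message either way. Here is a sketch consistent with these tests, assuming rq-scheduler's Scheduler.schedule API; the real implementation may detect duplicates differently.

from datetime import datetime


def _schedule_job(func, interval, scheduler):
    # Skip scheduling if a job for the same function is already queued
    func_name = f"{func.__module__}.{func.__name__}"
    if any(job.func_name == func_name for job in scheduler.get_jobs()):
        return f"Job {func.__name__} is already scheduled"
    # rq-scheduler records the interval in job.meta['interval'], which is
    # what test_adds_scheduled_job_with_interval asserts on
    scheduler.schedule(scheduled_time=datetime.utcnow(), func=func, interval=interval)
    return f"Scheduled {func.__name__} to run every {interval} seconds"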
jobs.py
Source:jobs.py
...
    kwargs: dict


def _job_missed_listener(event):
    # TODO: Log and notify about missed jobs
    logger.warning("A job has been missed")


def _schedule_job(message: _Message):
    """Schedule a job from a message."""
    _SCHEDULER.add_job(message.function, message.trigger, **message.kwargs)


def schedule_job(func, trigger, **kwargs):
    """Schedule a job."""
    message = _Message(function=func, trigger=trigger, kwargs=kwargs)
    if UWSGI:
        # Hand the pickled message to the mule process instead of scheduling locally
        uwsgi.mule_msg(pickle.dumps(message), MULE_NUM)
    else:
        _schedule_job(message)


def run_worker(is_mule=True):
    """Run the jobs worker."""
    atexit.register(_SCHEDULER.shutdown)
    _SCHEDULER.add_listener(_job_missed_listener, events.EVENT_JOB_MISSED)
    _SCHEDULER.start()
    if not is_mule:
        return
    while True:
        message: _Message = pickle.loads(uwsgi.mule_get_msg())
...
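Because schedule_job forwards its kwargs to APScheduler's add_job as trigger arguments, callers pass trigger options directly. A hypothetical usage sketch follows; cleanup_expired_sessions is made up for illustration and assumes jobs.py is importable as a module.

from jobs import schedule_job


def cleanup_expired_sessions():
    print("cleaning up expired sessions")


# Interval trigger: run every 30 minutes. Under uWSGI the message is pickled
# and handed to the mule; otherwise it goes straight to the local scheduler.
schedule_job(cleanup_expired_sessions, "interval", minutes=30)

# Cron trigger: run every day at noon
schedule_job(cleanup_expired_sessions, "cron", hour=12)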