Best Python code snippet using locust
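None of the snippets below import Locust directly, but they all follow the request idiom that a Locust HttpUser would use through self.client: send a request, call raise_for_status(), and parse the JSON body. Below is a minimal sketch of that idiom in Locust itself; the host and endpoint path are placeholders and are not taken from the snippets.

# Minimal Locust sketch of the post / raise_for_status / json pattern used
# in the snippets below. Host and endpoint are placeholder assumptions.
from locust import HttpUser, task, between

class ApiUser(HttpUser):
    wait_time = between(1, 3)        # pause 1-3 seconds between tasks
    host = "http://localhost:8000"   # placeholder host

    @task
    def post_and_parse(self):
        # self.client is a requests-compatible session, so the pattern
        # from the snippets translates directly.
        r = self.client.post("/service/example/", data={"name": "demo"})
        r.raise_for_status()
        _ = r.json()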
client_session.py
Source:client_session.py
...
        data = self._get_default_post_data()
        data['name'] = username
        data['password'] = password
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def whoami(self):
        url = self._build_url(self._user_auth_path + '/whoami/')
        data = self._get_default_post_data()
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return r.content.decode()
    def logout(self):
        url = self._build_url(self._user_auth_path + '/logout/')
        data = self._get_default_post_data()
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_info(self, page=None, page_size=50):
        url = self._build_url(self._user_auth_path + '/info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_count(self):
        url = self._build_url(self._user_auth_path + '/count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_create(self, username, password, hourly_limit, request_limit):
        url = self._build_url(self._user_auth_path + '/create/')
        data = self._get_default_post_data()
        data['name'] = username
        data['password'] = password
        data['hourly_limit'] = hourly_limit
        data['request_limit'] = request_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_change_password(self, username, password):
        url = self._build_url(self._user_auth_path + '/change_password/')
        data = self._get_default_post_data()
        data['name'] = username
        data['password'] = password
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_delete(self, username):
        url = self._build_url(self._user_auth_path + '/delete/')
        data = self._get_default_post_data()
        data['name'] = username
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_set_hourly_limit(self, username, hourly_limit):
        url = self._build_url(self._user_auth_path + '/set_hourly_limit/')
        data = self._get_default_post_data()
        data['name'] = username
        data['hourly_limit'] = hourly_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_set_request_limit(self, username, request_limit):
        url = self._build_url(self._user_auth_path + '/set_request_limit/')
        data = self._get_default_post_data()
        data['name'] = username
        data['request_limit'] = request_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_set_current_request_counter(self, username, count=0):
        url = self._build_url(self._user_auth_path + '/set_current_request_counter/')
        data = self._get_default_post_data()
        data['name'] = username
        data['count'] = count
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def user_version(self):
        url = self._build_url(self._user_auth_path + '/version/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    # key auth
    def key_info(self, page=None, page_size=50):
        url = self._build_url(self._key_auth_path + '/info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_count(self):
        url = self._build_url(self._key_auth_path + '/count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_use(self, key):
        self._default_data['authkey'] = key
        return 'Ok'
    def key_disable_use(self):
        try:
            del self._default_data['authkey']
        except KeyError:
            pass
        return 'Ok'
    def key_create(self, name, hourly_limit, request_limit):
        url = self._build_url(self._key_auth_path + '/create/')
        data = self._get_default_post_data()
        data['name'] = name
        data['hourly_limit'] = hourly_limit
        data['request_limit'] = request_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_delete(self, key):
        url = self._build_url(self._key_auth_path + '/delete/')
        data = self._get_default_post_data()
        data['key'] = key
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_set_hourly_limit(self, key, hourly_limit):
        url = self._build_url(self._key_auth_path + '/set_hourly_limit/')
        data = self._get_default_post_data()
        data['key'] = key
        data['hourly_limit'] = hourly_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_set_request_limit(self, key, request_limit):
        url = self._build_url(self._key_auth_path + '/set_request_limit/')
        data = self._get_default_post_data()
        data['key'] = key
        data['request_limit'] = request_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_set_current_request_counter(self, key, count=0):
        url = self._build_url(self._key_auth_path + '/set_current_request_counter/')
        data = self._get_default_post_data()
        data['key'] = key
        data['count'] = count
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def key_version(self):
        url = self._build_url(self._key_auth_path + '/version/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    # ip auth
    def ip_info(self, page=None, page_size=50):
        url = self._build_url(self._ip_auth_path + '/info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_count(self):
        url = self._build_url(self._ip_auth_path + '/count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_create(self, ip, hourly_limit, request_limit):
        url = self._build_url(self._ip_auth_path + '/create/')
        data = self._get_default_post_data()
        data['ip'] = ip
        data['hourly_limit'] = hourly_limit
        data['request_limit'] = request_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_delete(self, ip):
        url = self._build_url(self._ip_auth_path + '/delete/')
        data = self._get_default_post_data()
        data['ip'] = ip
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_set_hourly_limit(self, ip, hourly_limit):
        url = self._build_url(self._ip_auth_path + '/set_hourly_limit/')
        data = self._get_default_post_data()
        data['ip'] = ip
        data['hourly_limit'] = hourly_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_set_request_limit(self, ip, request_limit):
        url = self._build_url(self._ip_auth_path + '/set_request_limit/')
        data = self._get_default_post_data()
        data['ip'] = ip
        data['request_limit'] = request_limit
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_set_current_request_counter(self, ip, count=0):
        url = self._build_url(self._ip_auth_path + '/set_current_request_counter/')
        data = self._get_default_post_data()
        data['ip'] = ip
        data['count'] = count
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def ip_version(self):
        url = self._build_url(self._ip_auth_path + '/version/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    # jobs
    def job_info(self, page=None, page_size=50):
        url = self._build_url(self._jobs_path + '/info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_count(self):
        url = self._build_url(self._jobs_path + '/count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_delete(self, id):
        url = self._build_url(self._jobs_path + '/delete/')
        data = self._get_default_post_data()
        data['id'] = str(id)
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_rerun(self, id):
        url = self._build_url(self._jobs_path + '/rerun/')
        data = self._get_default_post_data()
        data['id'] = str(id)
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_delete_all(self):
        url = self._build_url(self._jobs_path + '/delete_all/')
        data = self._get_default_post_data()
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_delete_all_done(self):
        url = self._build_url(self._jobs_path + '/delete_all_done/')
        data = self._get_default_post_data()
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_delete_all_not_running(self):
        url = self._build_url(self._jobs_path + '/delete_all_not_running/')
        data = self._get_default_post_data()
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_delete_all_errors(self):
        url = self._build_url(self._jobs_path + '/delete_all_errors/')
        data = self._get_default_post_data()
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_completed(self, id):
        url = self._build_url(self._jobs_path + '/completed/')
        data = self._get_default_post_data()
        data['id'] = str(id)
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_wait(self, job_ids, loop_wait=1):
        for job_id in job_ids:
            while not self.job_completed(job_id):
                sleep(loop_wait)
        return 'Ok'
    def job_lock_info(self, page=None, page_size=50):
        url = self._build_url(self._jobs_path + '/lock_info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_lock_count(self):
        url = self._build_url(self._jobs_path + '/lock_count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_lock_delete(self, name):
        url = self._build_url(self._jobs_path + '/lock_delete/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def job_version(self):
        url = self._build_url(self._jobs_path + '/version/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    # classifiers
    def classifier_info(self, page=None, page_size=50):
        url = self._build_url(self._classifier_path + '/info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_count(self):
        url = self._build_url(self._classifier_path + '/count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_types(self):
        url = self._build_url(self._classifier_path + '/classifier_types/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_create(self, name, labels, type, overwrite=False):
        url = self._build_url(self._classifier_path + '/create/')
        data = self._get_default_post_data()
        data['name'] = name
        data['labels'] = labels
        data['type'] = type
        data['overwrite'] = overwrite
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_delete(self, name):
        url = self._build_url(self._classifier_path + '/delete/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_update(self, name, X, y):
        url = self._build_url(self._classifier_path + '/update/')
        data = self._get_default_post_data()
        data['name'] = name
        data['X'] = X
        data['y'] = y
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_set_description(self, name, description):
        url = self._build_url(self._classifier_path + '/set_description/')
        data = self._get_default_post_data()
        data['name'] = name
        data['description'] = description
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_set_public(self, name, public):
        url = self._build_url(self._classifier_path + '/set_public/')
        data = self._get_default_post_data()
        data['name'] = name
        data['public'] = public
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_rename(self, name, new_name, overwrite=False):
        url = self._build_url(self._classifier_path + '/rename/')
        data = self._get_default_post_data()
        data['name'] = name
        data['new_name'] = new_name
        data['overwrite'] = overwrite
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_label_info(self, name):
        url = self._build_url(self._classifier_path + '/label_info/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_label_rename(self, name, label_name, new_label_name):
        url = self._build_url(self._classifier_path + '/label_rename/')
        data = self._get_default_post_data()
        data['name'] = name
        data['label_name'] = label_name
        data['new_label_name'] = new_label_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_label_add(self, name, label_name):
        url = self._build_url(self._classifier_path + '/label_add/')
        data = self._get_default_post_data()
        data['name'] = name
        data['label_name'] = label_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_label_delete(self, name, label_name):
        url = self._build_url(self._classifier_path + '/label_delete/')
        data = self._get_default_post_data()
        data['name'] = name
        data['label_name'] = label_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_download_training_data(self, name, file, chunk_size=2048):
        url = self._build_url(self._classifier_path + '/download_training_data/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data, stream=True)
        for chunk in r.iter_content(chunk_size=chunk_size, decode_unicode=True):
            if chunk:
                file.write(chunk)
        return 'Ok'
    def classifier_download_model(self, name, file, chunk_size=2048):
        url = self._build_url(self._classifier_path + '/download_model/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data, stream=True)
        for chunk in r.iter_content(chunk_size=chunk_size):
            if chunk:
                file.write(chunk)
        return 'Ok'
    def classifier_upload_training_data(self, file, type):
        url = self._build_url(self._classifier_path + '/upload_training_data/')
        data = self._get_default_post_data()
        data['type'] = type
        files = {'file': file}
        r = self._session.post(url, data=data, files=files)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_upload_model(self, name, file, overwrite=False):
        url = self._build_url(self._classifier_path + '/upload_model/')
        data = self._get_default_post_data()
        data['name'] = name
        data['overwrite'] = overwrite
        files = {'file': file}
        r = self._session.post(url, data=data, files=files)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_classify(self, name, X):
        url = self._build_url(self._classifier_path + '/classify/')
        data = self._get_default_post_data()
        data['name'] = name
        data['X'] = X
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_score(self, name, X):
        url = self._build_url(self._classifier_path + '/score/')
        data = self._get_default_post_data()
        data['name'] = name
        data['X'] = X
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_extract(self, name, labels, type):
        url = self._build_url(self._classifier_path + '/extract/')
        data = self._get_default_post_data()
        data['name'] = name
        data['labels'] = labels
        data['type'] = type
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_merge(self, name, sources, type, overwrite=False):
        url = self._build_url(self._classifier_path + '/merge/')
        data = self._get_default_post_data()
        data['name'] = name
        data['sources'] = sources
        data['type'] = type
        data['overwrite'] = overwrite
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def classifier_version(self):
        url = self._build_url(self._classifier_path + '/version/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    # datasets
    def dataset_info(self, page=None, page_size=50):
        url = self._build_url(self._dataset_path + '/info/')
        data = self._get_default_post_data()
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_count(self):
        url = self._build_url(self._dataset_path + '/count/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_create(self, name):
        url = self._build_url(self._dataset_path + '/create/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_add_document(self, dataset_name, document_name, document_content):
        url = self._build_url(self._dataset_path + '/add_document/')
        data = self._get_default_post_data()
        data['name'] = dataset_name
        data['document_name'] = document_name
        data['document_content'] = document_content
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_delete_document(self, dataset_name, document_name):
        url = self._build_url(self._dataset_path + '/delete_document/')
        data = self._get_default_post_data()
        data['name'] = dataset_name
        data['document_name'] = document_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_set_description(self, name, description):
        url = self._build_url(self._dataset_path + '/set_description/')
        data = self._get_default_post_data()
        data['name'] = name
        data['description'] = description
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_rename(self, name, newname):
        url = self._build_url(self._dataset_path + '/rename/')
        data = self._get_default_post_data()
        data['name'] = name
        data['newname'] = newname
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_delete(self, name):
        url = self._build_url(self._dataset_path + '/delete/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_upload(self, name, file):
        url = self._build_url(self._dataset_path + '/upload/')
        files = {'file': file}
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data=data, files=files)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_download(self, name, file, chunk_size=2048):
        url = self._build_url(self._dataset_path + '/download/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data=data, stream=True)
        for chunk in r.iter_content(chunk_size=chunk_size, decode_unicode=True):
            if chunk:
                file.write(chunk)
        return 'Ok'
    def dataset_size(self, name):
        url = self._build_url(self._dataset_path + '/size/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_document_by_name(self, dataset_name, document_name):
        url = self._build_url(self._dataset_path + '/document_by_name/')
        data = self._get_default_post_data()
        data['name'] = dataset_name
        data['document_name'] = document_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_document_by_position(self, name, position):
        url = self._build_url(self._dataset_path + '/document_by_position/')
        data = self._get_default_post_data()
        data['name'] = name
        data['position'] = position
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_most_uncertain_document_id(self, dataset_name, classifier_name):
        url = self._build_url(self._dataset_path + '/most_uncertain_document_id/')
        data = self._get_default_post_data()
        data['name'] = dataset_name
        data['classifier_name'] = classifier_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_most_certain_document_id(self, dataset_name, classifier_name):
        url = self._build_url(self._dataset_path + '/most_certain_document_id/')
        data = self._get_default_post_data()
        data['name'] = dataset_name
        data['classifier_name'] = classifier_name
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_classify(self, name, classifiers):
        url = self._build_url(self._dataset_path + '/classify/')
        data = self._get_default_post_data()
        data['name'] = name
        data['classifiers'] = classifiers
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_classification_info(self, name, page=None, page_size=50):
        url = self._build_url(self._dataset_path + '/classification_info/')
        data = self._get_default_post_data()
        data['name'] = name
        data['page'] = page
        data['page_size'] = page_size
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def datatset_classification_count(self, name):
        url = self._build_url(self._dataset_path + '/classification_count/')
        data = self._get_default_post_data()
        data['name'] = name
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_classification_download(self, id, file, chunk_size=2048):
        url = self._build_url(self._dataset_path + '/classification_download/')
        data = self._get_default_post_data()
        data['id'] = str(id)
        r = self._session.post(url, data=data, stream=True)
        for chunk in r.iter_content(chunk_size=chunk_size, decode_unicode=True):
            if chunk:
                file.write(chunk)
        return 'Ok'
    def dataset_classification_delete(self, id):
        url = self._build_url(self._dataset_path + '/classification_delete/')
        data = self._get_default_post_data()
        data['id'] = str(id)
        r = self._session.post(url, data=data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def dataset_version(self):
        url = self._build_url(self._dataset_path + '/version/')
        data = self._get_default_post_data()
        r = self._session.post(url, data)
        r.raise_for_status()
        return json.loads(r.content.decode())
    def version(self):
        return "0.3.2"
if __name__ == '__main__':
    protocol = 'http'
    host = 'label.esuli.it'
    port = 80
    classifier_path = 'service/classifiers'
    dataset_path = 'service/datasets'
    jobs_path = 'service/jobs'
    user_auth_path = 'service/userauth'
    ip_auth_path = 'service/ipauth'
    key_auth_path = 'service/keyauth'
    key = 'you_key_here'
...
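Nearly every method in this client repeats the same four steps: build the URL, copy the default POST data, POST it with the shared requests session, then raise_for_status() and decode the JSON body. A hedged sketch of how that shared pattern could be factored into a single helper is shown below; the post_json name and the example call in the comment are illustrative and not part of the original file.

import json
import requests

def post_json(session, url, default_data, **fields):
    """POST form data and return the decoded JSON body (raises on HTTP errors)."""
    # Copy the defaults so each call can add its own fields without side effects.
    data = dict(default_data)
    data.update(fields)
    r = session.post(url, data=data)
    r.raise_for_status()
    return json.loads(r.content.decode())

# Hypothetical usage mirroring user_delete() above:
#     post_json(session, base_url + 'service/userauth/delete/', defaults, name='alice')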
test_examples.py
Source:test_examples.py
...
                url = 'http://127.0.0.1:{}'.format(registry.app.get_port())
                if val.app.config['name'] != 'registry':
                    url += '/{}'.format(val.app.config['name'])
                r = requests.post(url + '/heartbeat')
                r.raise_for_status()
                assert r.text == 'ok'
                logger.debug('Success HEARTBEAT on {} (try {})'.format(key, 1+i))
            except Exception:
                raise
            break
    class s(object):
        url = 'http://127.0.0.1:{port}'.format(port=registry.app.get_port())
    yield s
class TestMinimal(object):
    def test_minimal(self, server):
        r = requests.get(server.url + '/minimal/hello')
        r.raise_for_status()
        assert r.text == 'Hello world\n'
    def test_minimal_logs(self, server):
        r = requests.get(server.url + '/minimal/log', params=dict(n=10000))
        r.raise_for_status()
        assert b"================" in r.content
class TestRegistry(object):
    def test_registry_hello(self, server):
        r = requests.get(server.url)
        r.raise_for_status()
        assert r.text == 'This is registry\n'
        r = requests.get(server.url + '/')
        r.raise_for_status()
        assert r.text == 'This is registry\n'
    def test_registry_register(self, server):
        # Register a service named 'foo' at url 'foo_url'
        r = requests.post(server.url + '/register/foo', data='{"url": "foo_url"}')
        r.raise_for_status()
        # Register a service named 'foo' at url 'foo_url_2'
        r = requests.post(server.url + '/register/foo', data='{"url": "foo_url_2"}')
        r.raise_for_status()
        # Get urls for service 'foo'
        r = requests.get(server.url + '/register/foo')
        r.raise_for_status()
        doc = r.json()
        assert "foo" in doc
        assert len(doc["foo"]) > 1
        assert doc["foo"][0]["name"] == "foo"
        assert doc["foo"][0]["info"] == {}
        assert doc["foo"][0]["_id"] == "foo_url_2"
        assert doc["foo"][1]["name"] == "foo"
        assert doc["foo"][1]["info"] == {}
        assert doc["foo"][1]["_id"] == "foo_url"
        former_id = doc["foo"][0]['id']
        for x in doc["foo"]:
            assert x['id'] <= former_id
            former_id = x['id']
        # Get urls for all services
        r = requests.get(server.url + '/register/all')
        r.raise_for_status()
        doc = r.json()
        assert "foo" in doc
        assert len(doc["foo"]) > 1
    def test_registry_heartbeat(self, server):
        # This query shall be proxied to 'minimal' through 'registry'
        r = requests.post(server.url + '/heartbeat')
        r.raise_for_status()
        assert r.text == 'ok'
    def test_registry_minimal(self, server):
        # This query shall be proxied to 'minimal' through 'registry'
        r = requests.get(server.url + '/minimal/hello')
        r.raise_for_status()
        assert r.text == 'Hello world\n'
class TestTasks(object):
    def test_tasks_hello(self, server):
        r = requests.get(server.url + '/tasks')
        r.raise_for_status()
        assert r.text == 'This is tasks\n'
        r = requests.get(server.url + '/tasks/')
        r.raise_for_status()
        assert r.text == 'This is tasks\n'
    def test_tasks_action_simple(self, server):
        r = requests.put(server.url + '/tasks/action/task01/key01/stack', data={})
        r.raise_for_status()
        doc = r.json()
        assert 'after' in doc
        assert 'before' in doc
        assert doc['after']['key'] == 'key01'
        assert doc['after']['task'] == 'task01'
        assert doc['after']['_id'] == 'task01/key01'
        assert doc['after']['status'] in ['todo', 'toredo']
    def test_tasks_action_priority(self, server):
        r = requests.put(
            server.url + '/tasks/action/task01/key01/stack',
            data={},
            params={'priority': 1})
        r.raise_for_status()
        doc = r.json()
        assert 'after' in doc
        assert 'before' in doc
        assert doc['after']['key'] == 'key01'
        assert doc['after']['task'] == 'task01'
        assert doc['after']['_id'] == 'task01/key01'
        assert doc['after']['status'] in ['todo', 'toredo']
        assert doc['after']['priority'] == 1
    def test_tasks_force_simple(self, server):
        r = requests.put(server.url + '/tasks/force/task01/key01/fail', data={})
        r.raise_for_status()
        doc = r.json()
        assert 'after' in doc
        assert 'before' in doc
        assert doc['after']['key'] == 'key01'
        assert doc['after']['task'] == 'task01'
        assert doc['after']['_id'] == 'task01/key01'
        assert doc['after']['status'] == 'fail'
    def test_tasks_force_priority(self, server):
        r = requests.put(
            server.url + '/tasks/force/task01/key01/toredo',
            data={},
            params={'priority': 1})
        r.raise_for_status()
        doc = r.json()
        assert 'after' in doc
        assert 'before' in doc
        assert doc['after']['key'] == 'key01'
        assert doc['after']['task'] == 'task01'
        assert doc['after']['_id'] == 'task01/key01'
        assert doc['after']['status'] == 'toredo'
        assert doc['after']['priority'] == 1
    def test_tasks_assignOne_simple(self, server):
        while True:
            r = requests.put(server.url + '/tasks/assignOne/task01', data={})
            r.raise_for_status()
            if r.status_code != 200:
                assert r.status_code == 204
                break
        r = requests.put(server.url + '/tasks/force/task01/key01/todo', data={})
        r.raise_for_status()
        r = requests.put(server.url + '/tasks/assignOne/task01', data={})
        r.raise_for_status()
        assert r.status_code == 200
        doc = r.json()
        assert doc['key'] == 'key01'
        assert doc['task'] == 'task01'
        assert doc['status'] == 'todo'
    def test_tasks_assignOne_double(self, server):
        while True:
            r = requests.put(server.url + '/tasks/assignOne/task01', data={})
            r.raise_for_status()
            if r.status_code != 200:
                assert r.status_code == 204
                break
        r = requests.put(server.url + '/tasks/force/task01/key01/todo', data={})
        r.raise_for_status()
        r = requests.put(server.url + '/tasks/force/task01/key02/todo', data={})
        r.raise_for_status()
        r = requests.put(server.url + '/tasks/assignOne/task01', data={})
        r.raise_for_status()
        assert r.status_code == 200
        doc = r.json()
        assert doc['key'] == 'key01'
        assert doc['task'] == 'task01'
        assert doc['status'] == 'todo'
        r = requests.put(server.url + '/tasks/assignOne/task01', data={})
        r.raise_for_status()
        assert r.status_code == 200
        doc = r.json()
        assert doc['key'] == 'key02'
        assert doc['task'] == 'task01'
        assert doc['status'] == 'todo'
        r = requests.put(server.url + '/tasks/assignOne/task01', data={})
        r.raise_for_status()
        assert r.status_code == 204
    def test_get_by_key(self, server):
        r = requests.put(server.url + '/tasks/force/task01/key01/todo', data={})
        r.raise_for_status()
        r = requests.get(server.url + '/tasks/getByKey/task01/key01')
        r.raise_for_status()
        doc = r.json()
        assert doc['task'] == 'task01'
        assert doc['key'] == 'key01'
        assert doc['status'] == 'todo'
    def test_get_by_status(self, server):
        r = requests.put(server.url + '/tasks/force/task01/key01/todo', data={})
        r.raise_for_status()
        r = requests.put(server.url + '/tasks/force/task01/key02/done', data={})
        r.raise_for_status()
        r = requests.put(server.url + '/tasks/force/task01/key03/fail', data={})
        r.raise_for_status()
        r = requests.get(server.url + '/tasks/getByStatus/task01/todo%2Cdone%2Cfail', data={})
        r.raise_for_status()
        doc = r.json()
        assert 'done' in doc
        assert 'fail' in doc
        assert 'todo' in doc
        assert 'task01/key01' in [x['_id'] for x in doc['todo']]
        assert 'task01/key02' in [x['_id'] for x in doc['done']]
        assert 'task01/key03' in [x['_id'] for x in doc['fail']]
    def test_tasks_multithreading(self, server):
        def log_function(thread_id, r, operation):
            r.raise_for_status()
            open('mylog.log', 'a').write(' '.join([
                pd.Timestamp.utcnow().isoformat(),
                thread_id,
                operation,
                str(r.status_code),
                str(pd.Timedelta(r.elapsed))[-15:],
            ]) + '\n')
        def process_test(server, n=50):
            thread_id = uuid.uuid4().hex[:8]
            for i in range(n):
                r = requests.put(server + '/action/someTask/someKey/stack')
                log_function(thread_id, r, 'stack')
                r = requests.put(server + '/assignOne/someTask')
                log_function(thread_id, r, 'assignOne')
                if r.status_code == 200:
                    r = requests.put(server + '/action/someTask/someKey/success')
                    log_function(thread_id, r, 'success')
        # We launch 10 clients that will ask for tasks in the same time.
        open('mylog.log', 'w').write('')
        for i in range(10):
            multiprocessing.Process(target=process_test,
                                    args=(server.url + '/tasks',),
                                    ).start()
        # We wait for the clients to finish their job.
        for i in range(60):
            data = list(map(lambda x: x.strip().split(), open('mylog.log').readlines()))
            data = pd.DataFrame(
                data,
                columns=['dt', 'thread', 'action', 'code', 'duration'])
            data['dt'] = data['dt'].apply(pd.Timestamp)
            summary = data.groupby([
                'thread', 'action', 'code']).apply(len).unstack(0).T.fillna(0).astype(int)
            time.sleep(1)
            if 'stack' in summary and summary['stack', '200'].max() == 50:
                break
        # Up to there, the task mechanism has run without failures.
        assert ('stack' in summary and
                summary['stack', '200'].max() == 50), 'No thread ended his job'
        # Let's test if no task has been assigned twice in the same time.
        z = data[data.action.isin(['assignOne', 'success']) & (data.code == '200')].set_index('dt')
        z.sort_index(inplace=True)
        z['nbDoing'] = (z.action == 'assignOne').cumsum() - (z.action == 'success').cumsum()
        z['dt'] = (pd.np.diff(z.index.values).astype(int)*1e-9).tolist() + [None]
        # We check that no task was assigned twice for more than 0.1 sec.
        assert (z[z.nbDoing > 1]['dt'] < 0.1).all()
class TestPeriodicTask(object):
    def test_periodic_task(self, server):
        # We call '/latest' in a loop till at least 3 documents have been created.
        timeout = pd.Timestamp.utcnow() + pd.Timedelta(60, 's')
        while True:
            r = requests.get(server.url + '/periodictask/latest')
            r.raise_for_status()
            if r.text != 'null':
                doc = r.json()
                if doc['nb'] > 3:
                    break
            elif pd.Timestamp.utcnow() > timeout:
                raise TimeoutError('Timout reached with {} docs only'.format(doc['nb']))
            print(r.text)
...
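Every test above takes a server fixture and only uses its url attribute; the real fixture (partially shown at the top of the file) boots the registry and the other services, then polls /heartbeat before yielding. A hedged stand-in with the same minimal shape is sketched below; the hard-coded address is an assumption, and the real fixture's service start-up is omitted.

import pytest
import requests

@pytest.fixture(scope='module')
def server():
    # Minimal stand-in for the fixture above: the tests only need an object
    # exposing a .url attribute that points at a running registry.
    class Server:
        url = 'http://127.0.0.1:8000'  # assumed address of an already-running server
    # Fail fast if nothing is listening, mirroring the heartbeat check above.
    requests.post(Server.url + '/heartbeat').raise_for_status()
    yield Server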
api.py
Source:api.py
...
        return f"{URLs.site_api_schema}{URLs.site_api}/{quote_url(endpoint)}"
    async def close(self) -> None:
        """Close the aiohttp session."""
        await self.session.close()
    async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None:
        """Raise ResponseCodeError for non-OK response if an exception should be raised."""
        if should_raise and response.status >= 400:
            try:
                response_json = await response.json()
                raise ResponseCodeError(response=response, response_json=response_json)
            except aiohttp.ContentTypeError:
                response_text = await response.text()
                raise ResponseCodeError(response=response, response_text=response_text)
    async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
        """Send an HTTP request to the site API and return the JSON response."""
        async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp:
            await self.maybe_raise_for_status(resp, raise_for_status)
            return await resp.json()
    async def get(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
        """Site API GET."""
        return await self.request("GET", endpoint, raise_for_status=raise_for_status, **kwargs)
    async def patch(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
        """Site API PATCH."""
        return await self.request("PATCH", endpoint, raise_for_status=raise_for_status, **kwargs)
    async def post(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
        """Site API POST."""
        return await self.request("POST", endpoint, raise_for_status=raise_for_status, **kwargs)
    async def put(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
        """Site API PUT."""
        return await self.request("PUT", endpoint, raise_for_status=raise_for_status, **kwargs)
    async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]:
        """Site API DELETE."""
        async with self.session.delete(self._url_for(endpoint), **kwargs) as resp:
            if resp.status == 204:
                return None
            await self.maybe_raise_for_status(resp, raise_for_status)
...
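A hedged usage sketch of the async wrapper above follows. Only part of the class is shown, so the way the client instance is constructed is an assumption, and the endpoint strings are placeholders; the sketch just illustrates the keyword-only raise_for_status flag and delete() returning None on a 204 response.

import asyncio

async def demo(api):
    # `api` is assumed to be an instance of the (partially shown) client class.
    # GET that raises ResponseCodeError on any status >= 400:
    users = await api.get("example/items")
    # GET where the caller inspects errors itself instead of raising:
    maybe = await api.get("example/items/1", raise_for_status=False)
    # DELETE returns None when the API answers 204 No Content:
    deleted = await api.delete("example/items/1")
    print(users, maybe, deleted)
    await api.close()  # closes the underlying aiohttp session

# asyncio.run(demo(make_client()))  # make_client() is hypothetical wiring, not shown above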