Best Python code snippet using django-test-plus_python
paths.py
Source: paths.py
...
    # mode_collection.insert_one({"username": username.decode(), "theme": "light"})
    username = escape_html(username.decode()).encode()
    password = form_data['signup-password']['input']
    if b' ' in username or b' ' in password or form_data['signup-username']['input'] != username:
        handler.request.sendall(response_301("/"))
        return
    salt = bcrypt.gensalt()
    password = bcrypt.hashpw(base64.b64encode(hashlib.sha256(password).digest()), salt)
    # Check if username already exists
    account_data = account_collection.find()
    for entry in account_data:
        if entry["username"] == username:
            print("duplicate username", flush=True)
            handler.request.sendall(response_301("/"))
            return
    print(username, password)
    account_collection.insert_one(
        {"username": username, "password": password, "cookie": "-1", "salt": salt, "xsrf_token": "-1"})
    print("account created")
    handler.request.sendall(response_301("/"))


def login(request, handler):
    # Get parser info
    body = request.body
    headers = request.headers
    # Find boundary
    boundary = headers["Content-Type"].split('=')[1].strip()
    form_data = parse_form(body.strip(), boundary.encode())
    username = form_data['login-username']['input']
    username_for_theme = form_data['login-username']['input']
    username = escape_html(username.decode()).encode()
    password = form_data['login-password']['input']
    print(username, password)
    account_data = account_collection.find()
    for entry in account_data:
        # print(entry, flush=True)
        salt = entry["salt"]
        # print(salt, flush=True)
        encrypted_password = bcrypt.hashpw(base64.b64encode(hashlib.sha256(password).digest()), salt)
        # print("encrypted")
        # print(encrypted_password, entry["password"])
        if entry["username"] == username and entry["password"] == encrypted_password:
            # print("account found")
            cookie = str(uuid.uuid4()).encode()
            hashed_cookie = hashlib.sha256(cookie).digest()
            # print(cookie, type(cookie), flush=True)
            account_collection.update_one({"username": username}, {'$set': {'cookie': hashed_cookie}})
            cookie = b'Set-Cookie: id=' + cookie + b'; Max-Age=3600; HttpOnly'
            # print(cookie, flush=True)
            # print(redirect("/", cookie), flush=True)
            if is_theme(username_for_theme) == False:
                mode_collection.insert_one({"username": username.decode(), "theme": "light"})
            # generate a token and put it in the form
            # token = parse.generateXSRFToken()
            # xsrf_collection.insert_one({"xsrf_token": hashlib.sha256(token.encode()).digest(), "username": username})
            # with open("front_end/index.html", "rb") as file:
            #     file = file.read()
            # new = file.replace(b"{{token}}", token.encode())
            # with open("front_end/index.html", "wb") as file:
            #     file.write(new)
            handler.request.sendall(response_301("/", cookie))
            return  # stop after a successful login; otherwise a second 301 is sent below
    handler.request.sendall(response_301("/"))


def dark(request, handler):
    # Get parser info
    theme = "dark"
    username = get_username(request)
    if username == "":
        response.THEME = "light"
        handler.request.sendall(response_301("/"))
    else:
        mode_collection.update_one({"username": username}, {'$set': {"theme": theme}})
        handler.request.sendall(response_301("/"))


def light(request, handler):
    # Get parser info
    theme = "light"
    username = get_username(request)
    if username == "":
        response.THEME = "light"
        handler.request.sendall(response_301("/"))
    else:
        mode_collection.update_one({"username": username}, {'$set': {"theme": theme}})
...
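The signup and login handlers above share one hashing pattern: the raw password is reduced to a SHA-256 digest, base64-encoded, and only then passed to bcrypt. Below is a minimal standalone sketch of that pattern; the function name and sample passwords are illustrative and not part of paths.py.

import base64
import hashlib

import bcrypt


def hash_password(password: bytes, salt: bytes) -> bytes:
    # bcrypt only uses the first 72 bytes of its input and stops at NUL bytes,
    # so the password is first reduced to a fixed-length SHA-256 digest and
    # base64-encoded before being handed to bcrypt.
    return bcrypt.hashpw(base64.b64encode(hashlib.sha256(password).digest()), salt)


salt = bcrypt.gensalt()
stored = hash_password(b"correct horse battery staple", salt)

# Verification recomputes the hash with the stored salt, as login() does above.
assert hash_password(b"correct horse battery staple", salt) == stored
assert hash_password(b"wrong guess", salt) != stored

Storing the salt separately and comparing hashes with == mirrors the snippet; bcrypt.checkpw() could replace the manual comparison, since the salt is already embedded in the bcrypt hash itself.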
tests.py
Source: tests.py
...
        with self.login(username="u2"):
            self.get("index")
            self.response_302()
            self.get("/hits")
            self.response_301()
            self.get("/hitmen")
            self.response_301()
            self.get("/hits/bulk")
            self.response_302()
            self.get("/register")
            self.response_301()
            self.get("/logout")
            self.response_301()

    def test_hit_detail(self):
        h = Hit.objects.filter(assigned__username="u1").first()
        with self.subTest("view own hit"), self.login(username="u1"):
            self.get_check_200("hit_view", pk=h.id)
        h = Hit.objects.exclude(assigned__username="u1").first()
        with self.subTest("view others hit"), self.login(username="u1"):
            self.get("hit_view", pk=h.id)
            self.response_404()

    def test_create_hit(self):
        user = User.objects.get(username='u1')
        with self.login(username='u2'):
            self.get("create_hit")
            form = self.get_context("form")
            self.assertTrue(form.fields["assigned"].queryset.count() == 1)
...
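The assertions above come from django-test-plus: self.get() reverses a URL name (or takes a raw path) and performs the request, self.login() is a context manager that logs a user in for the enclosed block, and self.response_301() / self.response_302() / self.response_404() assert on the status code of the last response. A minimal self-contained sketch of the same pattern, assuming a hypothetical URL name "legacy-page" and the default password created by make_user():

from test_plus.test import TestCase


class RedirectTests(TestCase):

    def test_permanent_redirect(self):
        user = self.make_user("u1")           # creates a user with test-plus's default password
        with self.login(username=user.username):
            self.get("legacy-page")           # reverse the URL name and issue a GET
            self.response_301()               # assert the last response was a 301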
extensions.py
Source: extensions.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time   : 2021/5/25 11:05
# @Author : shl
# @File   : extensions.py
# @Desc   :
import logging
from scrapy import signals
import datetime
from threading import Timer
from influxdb import InfluxDBClient

logger = logging.getLogger(__name__)


class SpiderStatueStatistics:
    """
    Statistics extension for tracking the spider's crawl status.
    """

    def __init__(self, crawler, influxdb_params, interval):
        self.exit_code = False
        self.interval = interval
        self.crawler = crawler
        self.client = InfluxDBClient(**influxdb_params)
        self.stats_keys = set()
        self.cur_d = {
            'log_info': 0,
            'log_warning': 0,
            'requested': 0,
            'request_bytes': 0,
            'response': 0,
            'response_bytes': 0,
            'response_200': 0,
            'response_301': 0,
            'response_404': 0,
            'responsed': 0,
            'item': 0,
            'filtered': 0,
        }

    @classmethod
    def from_crawler(cls, crawler):
        influxdb_params = crawler.settings.get('INFLUXDB_PARAMS')
        interval = crawler.settings.get('INTERVAL', 60)
        ext = cls(crawler, influxdb_params, interval)
        crawler.signals.connect(ext.engine_started, signal=signals.engine_started)
        crawler.signals.connect(ext.engine_stopped, signal=signals.engine_stopped)
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(ext.spider_opened, signal=signals.spider_opened)
        return ext

    def spider_closed(self, spider, reason):
        logger.info(self.stats_keys)
        influxdb_d = {
            "measurement": "spider_closed",
            "time": datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
            "tags": {
                'spider_name': spider.name
            },
            "fields": {
                'end_time': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                'reason': reason,
                'spider_name': spider.name
            }
        }
        if not self.client.write_points([influxdb_d]):
            raise IOError('Failed to write to InfluxDB!')

    def spider_opened(self, spider):
        influxdb_d = {
            "measurement": "spider_opened",
            "time": datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
            "tags": {
                'spider_name': spider.name
            },
            "fields": {
                'start_time': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                'spider_name': spider.name
            }
        }
        if not self.client.write_points([influxdb_d]):
            raise IOError('Failed to write to InfluxDB!')
        logger.info('influxdb extension is starting')

    def engine_started(self):
        Timer(self.interval, self.handle_stat).start()

    def engine_stopped(self):
        self.exit_code = True

    def handle_stat(self):
        stats = self.crawler.stats.get_stats()
        d = {
            'log_info': stats.get('log_count/INFO', 0),
            'dequeued': stats.get('scheduler/dequeued/redis', 0),
            'log_warning': stats.get('log_count/WARNING', 0),
            'requested': stats.get('downloader/request_count', 0),
            'request_bytes': stats.get('downloader/request_bytes', 0),
            'response': stats.get('downloader/response_count', 0),
            'response_bytes': stats.get('downloader/response_bytes', 0),
            'response_200': stats.get('downloader/response_status_count/200', 0),
            'response_301': stats.get('downloader/response_status_count/301', 0),
            'response_404': stats.get('downloader/response_status_count/404', 0),
            'responsed': stats.get('response_received_count', 0),
            'item': stats.get('item_scraped_count', 0),
            'depth': stats.get('request_depth_max', 0),
            'filtered': stats.get('bloomfilter/filtered', 0),
            'enqueued': stats.get('scheduler/enqueued/redis', 0),
            'spider_name': self.crawler.spider.name
        }
        for key in self.cur_d:
            d[key], self.cur_d[key] = d[key] - self.cur_d[key], d[key]
        influxdb_d = {
            "measurement": "newspider",
            "time": datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
            "tags": {
                'spider_name': self.crawler.spider.name
            },
            "fields": d
        }
        if not self.client.write_points([influxdb_d]):
            raise IOError('Failed to write to InfluxDB!')
        self.stats_keys.update(stats.keys())
        if not self.exit_code:
...
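SpiderStatueStatistics is wired up as an ordinary Scrapy extension, so it has to be registered in the project settings along with the two custom settings read in from_crawler(). A sketch of what that might look like in settings.py; the dotted module path and the InfluxDB connection values are placeholders.

# settings.py (sketch, assuming the extension lives in myproject/extensions.py)
EXTENSIONS = {
    "myproject.extensions.SpiderStatueStatistics": 500,
}

# Passed as keyword arguments to InfluxDBClient(**influxdb_params) above.
INFLUXDB_PARAMS = {
    "host": "localhost",
    "port": 8086,
    "database": "scrapy_stats",
}

# Delay (in seconds) used by the Timer started in engine_started().
INTERVAL = 60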