Best Python code snippet using autotest_python
my_utils.py
Source: my_utils.py
import os

import numpy as np
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split, KFold


def get_cross_validated_subjects(x, y, names, n_folds=5):
    ''' A function to implement the cross-validation manually '''
    new_person = "ss_ss_ss_ss"
    subjects_pt = np.array([], dtype=str)
    subjects_co = np.array([], dtype=str)
    subjects_pt_idx = np.array([], dtype=int)
    subjects_co_idx = np.array([], dtype=int)
    for i in range(len(names)):
        if new_person.split("_")[0:2] != names[i].split("_")[0:2]:
            new_person = names[i]
            if "Pt" in new_person:
                subjects_pt = np.append(subjects_pt, [new_person], axis=0)
                subjects_pt_idx = np.append(subjects_pt_idx, [i], axis=0)
            else:
                subjects_co = np.append(subjects_co, [new_person], axis=0)
                subjects_co_idx = np.append(subjects_co_idx, [i], axis=0)
    print("------", subjects_pt.shape, subjects_pt_idx.shape)
    # The original built a second, shuffled KFold and discarded its result;
    # assign the shuffled splitter so the seed actually takes effect
    # (an unused `splits` generator was also dropped here).
    kf = KFold(n_splits=n_folds, random_state=42, shuffle=True)
    subjects_pt_train_all = np.asarray([])
    subjects_pt_train_idx_all = np.asarray([])
    subjects_pt_test_all = np.asarray([])
    subjects_pt_test_idx_all = np.asarray([])
    n_fold_train_samples = []
    n_fold_test_samples = []
    train_starting = 0
    test_starting = 0
    for pt_name_train_index, pt_name_test_index in kf.split(subjects_pt, subjects_pt_idx):
        subjects_pt_train_all = np.append(subjects_pt_train_all, subjects_pt[pt_name_train_index])
        subjects_pt_train_idx_all = np.append(subjects_pt_train_idx_all, subjects_pt_idx[pt_name_train_index])
        n_fold_train_samples.append(train_starting)
        train_starting += subjects_pt[pt_name_train_index].shape[0]
        subjects_pt_test_all = np.append(subjects_pt_test_all, subjects_pt[pt_name_test_index])
        subjects_pt_test_idx_all = np.append(subjects_pt_test_idx_all, subjects_pt_idx[pt_name_test_index])
        n_fold_test_samples.append(test_starting)
        test_starting += subjects_pt[pt_name_test_index].shape[0] - 1
    patients = [subjects_pt_train_all, subjects_pt_train_idx_all, subjects_pt_test_all,
                subjects_pt_test_idx_all, n_fold_train_samples, n_fold_test_samples]
    subjects_co_train_all = np.asarray([])
    subjects_co_train_idx_all = np.asarray([])
    subjects_co_test_all = np.asarray([])
    subjects_co_test_idx_all = np.asarray([])
    n_fold_train_samples = []
    n_fold_test_samples = []
    train_starting = 0
    test_starting = 0
    for co_name_train_index, co_name_test_index in kf.split(subjects_co, subjects_co_idx):
        subjects_co_train_all = np.append(subjects_co_train_all, subjects_co[co_name_train_index])
        subjects_co_train_idx_all = np.append(subjects_co_train_idx_all, subjects_co_idx[co_name_train_index])
        n_fold_train_samples.append(train_starting)
        train_starting += subjects_co[co_name_train_index].shape[0] - 1
        subjects_co_test_all = np.append(subjects_co_test_all, subjects_co[co_name_test_index])
        subjects_co_test_idx_all = np.append(subjects_co_test_idx_all, subjects_co_idx[co_name_test_index])
        n_fold_test_samples.append(test_starting)
        test_starting += subjects_co[co_name_test_index].shape[0]
    controls = [subjects_co_train_all, subjects_co_train_idx_all, subjects_co_test_all,
                subjects_co_test_idx_all, n_fold_train_samples, n_fold_test_samples]
    return patients, controls


def split_humanly_v2(x, y, names, pt_name_train_x, pt_name_test_x, pt_name_train_y, pt_name_test_y,
                     co_name_train_x, co_name_test_x, co_name_train_y, co_name_test_y):
    # Balance the classes by truncating the patient lists to the control counts,
    # then shuffle names and indices together.
    name_train_x = np.append(pt_name_train_x[:co_name_train_x.shape[0]], co_name_train_x, axis=0)
    name_train_x = name_train_x.reshape(name_train_x.shape[0], 1)
    name_train_y = np.append(pt_name_train_y[:co_name_train_y.shape[0]], co_name_train_y, axis=0)
    name_train_y = name_train_y.reshape(name_train_y.shape[0], 1)
    train = np.concatenate((name_train_x, name_train_y), axis=1)
    np.random.shuffle(train)
    name_train_x = train[:, 0]
    name_train_y = np.asarray(train[:, 1], dtype=int)
    name_test_x = np.append(pt_name_test_x[:co_name_test_x.shape[0]], co_name_test_x, axis=0)
    name_test_x = name_test_x.reshape(name_test_x.shape[0], 1)
    name_test_y = np.append(pt_name_test_y[:co_name_test_y.shape[0]], co_name_test_y, axis=0)
    name_test_y = name_test_y.reshape(name_test_y.shape[0], 1)
    test = np.concatenate((name_test_x, name_test_y), axis=1)
    np.random.shuffle(test)
    name_test_x = test[:, 0]
    name_test_y = np.asarray(test[:, 1], dtype=int)
    name_train_x = name_train_x.reshape(name_train_x.shape[0], )
    name_train_y = name_train_y.reshape(name_train_y.shape[0], )
    name_test_x = name_test_x.reshape(name_test_x.shape[0], )
    name_test_y = name_test_y.reshape(name_test_y.shape[0], )
    print("-total train:", len(name_train_x),
          "-Pt in train:", len([i for i in name_train_x if "Pt" in i]),
          "-Co in train:", len([i for i in name_train_x if "Co" in i]))
    print("-total test:", len(name_test_x),
          "-Pt in test:", len([i for i in name_test_x if "Pt" in i]),
          "-Co in test:", len([i for i in name_test_x if "Co" in i]))
    # Gather every consecutive sample that belongs to each selected subject.
    train_x = np.array([])
    train_y = np.array([])
    for i in range(len(name_train_y)):
        unique_name = name_train_x[i]
        if np.size(train_x) == 0:
            train_x = [x[name_train_y[i], :, :]]
            train_y = [y[name_train_y[i], :]]
        else:
            train_x = np.append(train_x, [x[name_train_y[i], :, :]], axis=0)
            train_y = np.append(train_y, [y[name_train_y[i], :]], axis=0)
        next_idx = name_train_y[i] + 1  # renamed from `next` to avoid shadowing the builtin
        while next_idx < len(names) and unique_name.split("_")[0:2] == names[next_idx].split("_")[0:2]:
            train_x = np.append(train_x, [x[next_idx, :, :]], axis=0)
            train_y = np.append(train_y, [y[next_idx, :]], axis=0)
            next_idx = next_idx + 1
    test_x = np.array([])
    test_y = np.array([])
    for i in range(len(name_test_y)):
        unique_name = name_test_x[i]
        if np.size(test_x) == 0:
            test_x = [x[name_test_y[i], :, :]]
            test_y = [y[name_test_y[i], :]]
        else:
            test_x = np.append(test_x, [x[name_test_y[i], :, :]], axis=0)
            test_y = np.append(test_y, [y[name_test_y[i], :]], axis=0)
        next_idx = name_test_y[i] + 1
        while next_idx < len(names) and unique_name.split("_")[0:2] == names[next_idx].split("_")[0:2]:
            test_x = np.append(test_x, [x[next_idx, :, :]], axis=0)
            test_y = np.append(test_y, [y[next_idx, :]], axis=0)
            next_idx = next_idx + 1
    return train_x, train_y, test_x, test_y


def split_humanly(x, y, names):
    new_person = "ss_ss_ss_ss"
    subjects_pt = np.array([], dtype=str)
    subjects_co = np.array([], dtype=str)
    subjects_pt_idx = np.array([], dtype=int)
    subjects_co_idx = np.array([], dtype=int)
    for i in range(len(names)):
        if new_person.split("_")[0:2] != names[i].split("_")[0:2]:
            new_person = names[i]
            if "Pt" in new_person:
                subjects_pt = np.append(subjects_pt, [new_person], axis=0)
                subjects_pt_idx = np.append(subjects_pt_idx, [i], axis=0)
            else:
                subjects_co = np.append(subjects_co, [new_person], axis=0)
                subjects_co_idx = np.append(subjects_co_idx, [i], axis=0)
    pt_name_train_x, pt_name_test_x, pt_name_train_y, pt_name_test_y = train_test_split(
        subjects_pt, subjects_pt_idx, test_size=0.53, random_state=42, shuffle=True)
    pt_name_valid_x, pt_name_test_x, pt_name_valid_y, pt_name_test_y = train_test_split(
        pt_name_test_x, pt_name_test_y, test_size=0.60, random_state=42, shuffle=True)
    co_name_train_x, co_name_test_x, co_name_train_y, co_name_test_y = train_test_split(
        subjects_co, subjects_co_idx, test_size=0.14, random_state=42, shuffle=True)
    co_name_valid_x, co_name_test_x, co_name_valid_y, co_name_test_y = train_test_split(
        co_name_test_x, co_name_test_y, test_size=0.60, random_state=42, shuffle=True)
    name_train_x = np.append(pt_name_train_x[:co_name_train_x.shape[0]], co_name_train_x, axis=0)
    name_train_x = name_train_x.reshape(name_train_x.shape[0], 1)
    name_train_y = np.append(pt_name_train_y[:co_name_train_y.shape[0]], co_name_train_y, axis=0)
    name_train_y = name_train_y.reshape(name_train_y.shape[0], 1)
    train = np.concatenate((name_train_x, name_train_y), axis=1)
    np.random.shuffle(train)
    name_train_x = train[:, 0]
    name_train_y = np.asarray(train[:, 1], dtype=int)
    name_valid_x = np.append(pt_name_valid_x[:co_name_valid_x.shape[0]], co_name_valid_x, axis=0)
    name_valid_x = name_valid_x.reshape(name_valid_x.shape[0], 1)
    name_valid_y = np.append(pt_name_valid_y[:co_name_valid_y.shape[0]], co_name_valid_y, axis=0)
    name_valid_y = name_valid_y.reshape(name_valid_y.shape[0], 1)
    valid = np.concatenate((name_valid_x, name_valid_y), axis=1)
    np.random.shuffle(valid)
    name_valid_x = valid[:, 0]
    name_valid_y = np.asarray(valid[:, 1], dtype=int)
    name_test_x = np.append(pt_name_test_x[:co_name_test_x.shape[0]], co_name_test_x, axis=0)
    name_test_x = name_test_x.reshape(name_test_x.shape[0], 1)
    name_test_y = np.append(pt_name_test_y[:co_name_test_y.shape[0]], co_name_test_y, axis=0)
    name_test_y = name_test_y.reshape(name_test_y.shape[0], 1)
    test = np.concatenate((name_test_x, name_test_y), axis=1)
    np.random.shuffle(test)
    name_test_x = test[:, 0]
    name_test_y = np.asarray(test[:, 1], dtype=int)
    name_train_x = name_train_x.reshape(name_train_x.shape[0], )
    name_train_y = name_train_y.reshape(name_train_y.shape[0], )
    name_valid_x = name_valid_x.reshape(name_valid_x.shape[0], )
    name_valid_y = name_valid_y.reshape(name_valid_y.shape[0], )
    name_test_x = name_test_x.reshape(name_test_x.shape[0], )
    name_test_y = name_test_y.reshape(name_test_y.shape[0], )
    print("-total train:", len(name_train_x),
          "-Pt in train:", len([i for i in name_train_x if "Pt" in i]),
          "-Co in train:", len([i for i in name_train_x if "Co" in i]))
    print("-total valid:", len(name_valid_x),
          "-Pt in valid:", len([i for i in name_valid_x if "Pt" in i]),
          "-Co in valid:", len([i for i in name_valid_x if "Co" in i]))
    print("-total test:", len(name_test_x),
          "-Pt in test:", len([i for i in name_test_x if "Pt" in i]),
          "-Co in test:", len([i for i in name_test_x if "Co" in i]))
    train_x = np.array([])
    train_y = np.array([])
    for i in range(len(name_train_y)):
        unique_name = name_train_x[i]
        if np.size(train_x) == 0:
            train_x = [x[name_train_y[i], :, :]]
            train_y = [y[name_train_y[i], :]]
        else:
            train_x = np.append(train_x, [x[name_train_y[i], :, :]], axis=0)
            train_y = np.append(train_y, [y[name_train_y[i], :]], axis=0)
        next_idx = name_train_y[i] + 1
        while next_idx < len(names) and unique_name.split("_")[0:2] == names[next_idx].split("_")[0:2]:
            train_x = np.append(train_x, [x[next_idx, :, :]], axis=0)
            train_y = np.append(train_y, [y[next_idx, :]], axis=0)
            next_idx = next_idx + 1
    valid_x = np.array([])
    valid_y = np.array([])
    for i in range(len(name_valid_y)):
        unique_name = name_valid_x[i]
        if np.size(valid_x) == 0:
            valid_x = [x[name_valid_y[i], :, :]]
            valid_y = [y[name_valid_y[i], :]]
        else:
            valid_x = np.append(valid_x, [x[name_valid_y[i], :, :]], axis=0)
            valid_y = np.append(valid_y, [y[name_valid_y[i], :]], axis=0)
        next_idx = name_valid_y[i] + 1
        while next_idx < len(names) and unique_name.split("_")[0:2] == names[next_idx].split("_")[0:2]:
            valid_x = np.append(valid_x, [x[next_idx, :, :]], axis=0)
            valid_y = np.append(valid_y, [y[next_idx, :]], axis=0)
            next_idx = next_idx + 1
    test_x = np.array([])
    test_y = np.array([])
    for i in range(len(name_test_y)):
        unique_name = name_test_x[i]
        if np.size(test_x) == 0:
            test_x = [x[name_test_y[i], :, :]]
            test_y = [y[name_test_y[i], :]]
        else:
            test_x = np.append(test_x, [x[name_test_y[i], :, :]], axis=0)
            test_y = np.append(test_y, [y[name_test_y[i], :]], axis=0)
        next_idx = name_test_y[i] + 1
        while next_idx < len(names) and unique_name.split("_")[0:2] == names[next_idx].split("_")[0:2]:
            test_x = np.append(test_x, [x[next_idx, :, :]], axis=0)
            test_y = np.append(test_y, [y[next_idx, :]], axis=0)
            next_idx = next_idx + 1
    return train_x, train_y, valid_x, valid_y, test_x, test_y


def load_data(path="data/", function="None", normalization=False):
    x = np.array([])
    y = np.array([])
    names = np.genfromtxt(path + sorted(os.listdir(path))[1], dtype=str)[1:, -1]  # provisional; reassigned from the first valid csv below
    for filename in sorted(os.listdir(path)):
        if not filename.endswith(".csv"):  # if the file is not valid
            continue
        new_x = np.genfromtxt(path + filename, dtype=float)[1:, :-1]  # convert each csv file to a numpy array
        new_name = np.genfromtxt(path + filename, dtype=str)[1:, -1]  # name of each sample, e.g. JuCo23_01_28_L1
        new_y = np.zeros((np.size(new_name, axis=0), 1))
        for i in range(np.size(new_y, axis=0)):
            if "Pt" in str(new_name[i]):  # patient as positive class
                new_y[i] = 1
            else:
                new_y[i] = 0
        if np.size(x) == 0:
            x = new_x
            y = new_y
            names = new_name
        else:
            x = np.dstack((x, new_x))
            y = np.dstack((y, new_y))
    n_values = x.shape[2]  # number of total sensors
    if function == "average":
        n_values = 2
        y = y[:, :, n_values - 1]
        average_left = np.mean(x[:, :, :n_values], axis=2)
        average_right = np.mean(x[:, :, n_values:], axis=2)
        x = np.dstack((average_left, average_right))
    if function == "None":  # 16 sensors
        y = y[:, :, n_values - 1]
    if function == "difference":  # to reduce the total sensors to half
        n_values = int(n_values / 2)
        x = x[:, :, n_values:] - x[:, :, :n_values]
        y = y[:, :, n_values - 1]
    if normalization:
        for i in range(x.shape[0]):
            for j in range(x.shape[2]):
                m = np.mean(x[i, :, j])
                s = np.std(x[i, :, j]) + np.finfo(np.float32).eps
                x[i, :, j] = (x[i, :, j] - m) / s
    return x, y, names


def plot_data(x, y, type="normal"):
    plt.style.use("seaborn")  # on matplotlib >= 3.6 this style is named "seaborn-v0_8"
    my_dpi = 96
    plt.figure(figsize=(2000 / my_dpi, 1000 / my_dpi), dpi=my_dpi)
    T_x = x.shape[0]  # length of the sequence
    n_values = x.shape[1]  # number of values (sensors)
    t = np.arange(0, T_x / 100, 0.01)
    if type == "average":
        y1 = np.ones(t.size)
        y2 = np.ones(t.size) * 2
        z1 = x[:, 0]
        z2 = x[:, 1]
        ax = plt.subplot(projection='3d')
        ax.plot(t, y1, z1, color='r')
        ax.plot(t, y2, z2, color='g')
        ax.add_collection3d(plt.fill_between(t, z1, z1, color='r', alpha=0.3), zs=1, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z2, z2, color='g', alpha=0.3), zs=2, zdir='y')
        ax.set_xlabel('Time (s)', fontsize=20)
        ax.set_zlabel('Average vGRFs (N)', fontsize=20)
        if y == 1:
            subject_type = "Patient Subject"
        elif y == 0:
            subject_type = "Control Subject"
        ax.text2D(0.05, 0.95, subject_type, transform=ax.transAxes, fontsize=20)
        plt.show()
    else:
        y1 = np.ones(t.size)
        y2 = np.ones(t.size) * 2
        y3 = np.ones(t.size) * 3
        y4 = np.ones(t.size) * 4
        y5 = np.ones(t.size) * 5
        y6 = np.ones(t.size) * 6
        y7 = np.ones(t.size) * 7
        y8 = np.ones(t.size) * 8
        z1 = x[:, 0]
        z2 = x[:, 1]
        z3 = x[:, 2]
        z4 = x[:, 3]
        z5 = x[:, 4]
        z6 = x[:, 5]
        z7 = x[:, 6]
        z8 = x[:, 7]
        ax = plt.subplot(projection='3d')
        ax.plot(t, y1, z1, color='r')
        ax.plot(t, y2, z2, color='g')
        ax.plot(t, y3, z3, color='b')
        ax.plot(t, y4, z4, color='c')
        ax.plot(t, y5, z5, color='m')
        ax.plot(t, y6, z6, color='y')
        ax.plot(t, y7, z7, color='w')
        ax.plot(t, y8, z8, color='k')
        ax.add_collection3d(plt.fill_between(t, z1, z1, color='r', alpha=0.3), zs=1, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z2, z2, color='g', alpha=0.3), zs=2, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z3, z3, color='b', alpha=0.3), zs=3, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z4, z4, color='c', alpha=0.3), zs=4, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z5, z5, color='m', alpha=0.3), zs=5, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z6, z6, color='y', alpha=0.3), zs=6, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z7, z7, color='w', alpha=0.3), zs=7, zdir='y')
        ax.add_collection3d(plt.fill_between(t, z8, z8, color='k', alpha=0.3), zs=8, zdir='y')
        ax.set_xlabel('Time (s)', fontsize=20)
        ax.set_zlabel('vGRFs (N)', fontsize=20)
        if y == 1:
            subject_type = "Patient Subject"
        elif y == 0:
            subject_type = "Control Subject"
        ax.text2D(0.05, 0.95, subject_type, transform=ax.transAxes, fontsize=20)
...
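
The helpers above are meant to be chained: load_data() stacks the per-sensor CSVs into 3-D arrays and split_humanly() turns them into subject-disjoint train/validation/test sets. A minimal driver sketch, assuming a hypothetical data/ directory laid out the way load_data() expects (CSV files whose last column is the sample name, e.g. JuCo23_01_28_L1); the flags used here are just one possible choice:

import numpy as np

# Hypothetical usage of the snippet above; paths and flag values are illustrative.
from my_utils import load_data, split_humanly

x, y, names = load_data(path="data/", function="difference", normalization=True)
train_x, train_y, valid_x, valid_y, test_x, test_y = split_humanly(x, y, names)
# Every sample of a given subject lands in exactly one split.
print("train:", np.shape(train_x), "valid:", np.shape(valid_x), "test:", np.shape(test_x))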
evaluation.py
Source: evaluation.py
# python -m pip install tldextract
import tldextract
import time
import requests
import random
import csv


def read_urls(path=None):
    if not path:
        # list of test URLs [URL, title]
        urls = [
            ['https://versicherung.ge-be-in.de/startseite', "Sterbegeldversicherung seit 1923 - GE·BE·IN Versicherungen"],
            ['https://idsrv.lv1871.de/Login', '"Lebensversicherung von 1871 a. G. München"'],
            ['http://www.rogerebert.com/reviews/reservoir-dogs-1992', 'Reservoir Dogs movie review & film summary (1992) | Roger Ebert'],
            ['http://www.kuselit.de/', ':: Kuselit Verlag :: Rechtsbibliografie'],
            ['http://www.ullapoolholidayhomes.com/', 'Holiday Accommodation Self Catering Cottages | Ullapool Scotland NC500'],
            ['http://www.art-und-tat.de/', 'Veronika Jakob Grafikdesign'],
            ['http://www.tuffies.co.uk/', 'Buy Luxury Dog Beds UK Online | British Handmade Heavy Duty & Chew Proof Dog Beds | Tuffies'],
            # ['http://www.comune.pietraperzia.en.it/', 'Comune di Pietraperzia'],
            ['http://www.traditieialomita.ro/', 'Tradiție și Modernism – Centrul Județean pentru Conservarea și Promovarea Culturii Tradiționale Ialomița'],
            ['http://www.theglenlyngorge.co.uk/', '\n']
            # ['http://www.kronos.co.uk/', 'Workforce Management and HCM Cloud Solutions | Kronos UK']
        ]
        return urls
    urls = []
    with open(path, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')  # encoding='utf-8'
        next(reader)  # skip header
        for row in reader:
            url_pair = [row[0], row[1]]
            urls.append(url_pair)
    return urls


def write_sample(fake_urls, benign_urls):
    with open('data/fake_urls.csv', 'w', newline='', encoding='utf-8') as file:
        writer = csv.writer(file, delimiter=',')
        writer.writerow(['url', 'title'])
        writer.writerows(fake_urls)
    with open('data/benign_urls.csv', 'w', newline='', encoding='utf-8') as file:
        writer = csv.writer(file, delimiter=',')
        writer.writerow(['url', 'title'])
        writer.writerows(benign_urls)


def get_starting_url(file):
    f = open(file, "r")
    return f.read()


def set_starting_url(file, index, close=True):
    f = open(file, "w")
    f.write(index)
    if close:
        f.close()


def split_list(a_list):
    half = len(a_list) // 2
    return a_list[:half], a_list[half:]


def evaluate(urls, index_writer, fake=True, starting_url=0, test=False):
    # index_writer is currently unused; progress is persisted via set_starting_url()
    nof_skipped_samples = 0
    i = 0 if not starting_url else int(starting_url)
    for url in urls[i:len(urls)]:
        if not url[1].strip():
            print("Stripped title of " + url[0] + " is empty. Going on.")
            nof_skipped_samples = nof_skipped_samples + 1
            i = i + 1
            continue
        else:
            url[1] = url[1].strip()
        # strip quotes from the title
        url[1] = url[1].strip('\"')
        # create the request payload
        if fake:  # request for a fake phishing site
            current_json = {
                "URL": url[0],
                "uuid": str(client_uuid),
                "pagetitle": url[1],
                # "image64": "",
                "phishURL": "http://bogusurl" + str(i) + ".co.uk"
            }
        else:  # request for a benign site
            current_json = {
                "URL": url[0],
                "uuid": str(client_uuid),
                "pagetitle": url[1],
                # "image64": "",
                "phishURL": ""
            }
        if fake:
            print(str(i) + ': Sent request for ' + url[0] + ' with fakeURL ' + str(current_json["phishURL"]))
        else:
            print(str(i) + ': Sent request for ' + url[0] + ' with benignURL ' + str(current_json["URL"]))
        try:
            res = requests.post('http://192.168.56.100:5000/api/v1/url', json=current_json, timeout=1200)
        except requests.Timeout as err:
            print(str(url[0]) + " timed out. Going on.")
            print(err)
            nof_skipped_samples = nof_skipped_samples + 1
            i = i + 1
            continue
        except requests.RequestException as err:
            print("Error with requests: ")
            print(err)
            print(url[0] + " got error. Going on.")
            nof_skipped_samples = nof_skipped_samples + 1
            i = i + 1
            continue
        print('Response from antiPhish server: ', res.text)
        # write the current processed index to file
        if not test:
            set_starting_url(path_fake_counter, str(i)) if fake else set_starting_url(path_benign_counter, str(i))
        else:
            set_starting_url("data/test_starting.txt", str(i))
        # sleep every x calls to not trigger Google rate limiting
        if i % 3 == 0:
            sleep_time = random.randint(20, 30)
            sleep_time = 1  # debug override; remove to keep the random 20-30 s pause
            print("Sleeping " + str(sleep_time) + " seconds.")
            time.sleep(sleep_time)
        # go to the next URL
        i = i + 1
    # write the last processed index to file again when finished
    if not test:
        set_starting_url(path_fake_counter, str(i)) if fake else set_starting_url(path_benign_counter, str(i))
    else:
        set_starting_url("data/test_starting.txt", str(i))


## global constants ##
test = False
write_samples = False
path_urls = "data/urls.csv" if not test else None
path_fake_urls = "data/fake_urls.csv"
path_benign_urls = "data/benign_urls.csv"
path_fake_counter = "data/last_fake_url.txt"
path_benign_counter = "data/last_benign_url.txt"
fake_starting_url = get_starting_url(path_fake_counter) if not test else 0
benign_starting_url = get_starting_url(path_benign_counter) if not test else 0
client_uuid = "simulation_1"
# total number of urls to evaluate (split 50/50 for fake/benign)
# urls_to_process = 2274 if not test else None
urls_to_process = None


def main():
    startTime = time.time()
    print("Starting time: " + time.strftime("%H:%M:%S %a, %d-%b-%Y", time.gmtime()))
    # get all available urls
    urls = read_urls(path_urls)
    print("Read " + str(len(urls)) + " URLs from " + str(path_urls))
    if not urls_to_process:
        nof_urls = len(urls)
    else:
        nof_urls = urls_to_process
    print("Total number of URLs to evaluate: " + str(nof_urls) + "\n")
    if not test:
        if not write_samples:
            # read fake phish and benign urls
            print("Reading already sampled URLs.")
            fake_urls = read_urls(path_fake_urls)
            print("Read " + str(len(fake_urls)) + " fake URLs from " + str(path_fake_urls))
            benign_urls = read_urls(path_benign_urls)
            print("Read " + str(len(benign_urls)) + " benign URLs from " + str(path_benign_urls))
        else:
            # sample 50% fake_urls and 50% benign_urls
            sampled_urls = random.sample(urls, nof_urls)
            fake_urls, benign_urls = split_list(sampled_urls)
            write_sample(fake_urls, benign_urls)
        # evaluate
        print("Starting fake url: " + str(fake_starting_url))
        print("Beginning to evaluate " + str(len(fake_urls) - int(fake_starting_url)) + " fake URLs.")
        # note: opening the counter with "w" truncates it; evaluate() rewrites it via set_starting_url()
        f = open(path_fake_counter, "w")
        evaluate(fake_urls, f, True, fake_starting_url)
        f.close()
        # sleep 5 min before starting the benign urls
        # time.sleep(300)
        print("Starting benign url: " + str(benign_starting_url))
        print("Beginning to evaluate " + str(len(benign_urls) - int(benign_starting_url)) + " benign URLs.\n")
        f = open(path_benign_counter, "w")
        evaluate(benign_urls, f, False, benign_starting_url)
        f.close()
    else:
        test_starting_url = get_starting_url("data/test_starting.txt")
        print("Starting test url: " + str(test_starting_url))
        print("Running test with " + str(nof_urls - int(test_starting_url)) + " URLs \n")
        f = open("data/test_starting.txt", "w")
        evaluate(urls, f, False, test_starting_url, True)
        f.close()
    stopTime = time.time()
    print("Ending time: " + time.strftime("%H:%M:%S %a, %d-%b-%Y", time.gmtime()))
    print(f"Finished. Time elapsed for complete evaluation is {round((stopTime - startTime)/60, 2)} min\n")


if __name__ == '__main__':
...
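
evaluate() POSTs each payload to an antiPhish endpoint at http://192.168.56.100:5000/api/v1/url and resumes from the index persisted in the counter files. For a local dry run without the real server, a throwaway stub like the sketch below is enough to exercise the request/resume loop; the stub and its response fields are hypothetical, since the real API's schema is not part of this snippet:

# Hypothetical stand-in for the antiPhish server used by evaluation.py.
# It only echoes the fields evaluate() sends (URL, uuid, pagetitle, phishURL);
# the real server's response format is not documented here.
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route('/api/v1/url', methods=['POST'])
def receive_url():
    payload = request.get_json()
    return jsonify({"URL": payload.get("URL"),
                    "phishURL": payload.get("phishURL"),
                    "status": "ok"})

if __name__ == '__main__':
    # Bind to port 5000 so evaluation.py can reach it (adjust the host/IP to match).
    app.run(host='0.0.0.0', port=5000)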
test_schedule.py
Source: test_schedule.py
...
class TestInterval:
    class TestFinal:
        def test_do(self):  # synced
            assert True
        def test_starting(self):  # synced
            assert True
        def test_ending(self):  # synced
            assert True
    class TestChainableFinal:
        def test_and_(self):  # synced
            assert True
    class TestDay:
        def test_at(self):  # synced
            assert True
    class TestChainableDay:
        def test_and_(self):  # synced
            assert True
    class TestChainableWeekDay:
        def test_and_(self):  # synced
            assert True
    class TestMonth:
        def test_on_the(self):  # synced
            assert True
        def test_on(self):  # synced
            assert True
    class TestChainableMonth:
        def test_and_(self):  # synced
            assert True
    class TestYear:
        def test_in_(self):  # synced
            assert True


class TestRelative:
    class TestSettings:
        pass
    class TestSelector:
        class TestBase:
            pass
        class TestSecond:
            def test_seconds(self):  # synced
                assert True
        class TestMinute:
            def test_minutes(self):  # synced
                assert True
        class TestHour:
            def test_hours(self):  # synced
                assert True
        class TestDay:
            def test_days(self):  # synced
                assert True
        class TestMonth:
            def test_months(self):  # synced
                assert True
        class TestYear:
            def test_years(self):  # synced
                assert True
    class TestInterval:
        class TestFinal:
            def test_do(self):  # synced
                assert True
            def test_starting(self):  # synced
                assert True
            def test_ending(self):  # synced
                assert True
        class TestMinute:
            def test_and_(self):  # synced
                assert True
        class TestHour:
            def test_and_(self):  # synced
                assert True
        class TestDay:
            def test_and_(self):  # synced
                assert True
        class TestMonth:
            def test_and_(self):  # synced
...
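
pytest collects the nested Test* classes and test_* methods above as individual test items, so the placeholder asserts run without extra configuration. A small runner sketch, equivalent to invoking python -m pytest test_schedule.py -v (the file name is taken from the heading above):

# Run the suite above via pytest's programmatic entry point.
import pytest

# pytest.main returns the exit code, which we pass straight to the shell.
raise SystemExit(pytest.main(["-v", "test_schedule.py"]))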