Python code snippet: graphlet-kernel computation for graphs (uses numpy and networkx)
graphlet_kernels.py
Source:graphlet_kernels.py
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 28 15:57:41 2016

@author: Syzygy
"""
# Graphlet kernels: sampled k-graphlet, sampled 3&4-graphlets,
# all connected 3,4-graphlets and all connected 3,4,5-graphlets
# (with weight / normalize options).
import numpy as np
import time
import itertools
import random
import math
try:
    import networkx as nx
except ImportError:  # networkx is only required by the kernel-matrix builders
    nx = None


def number_of_graphlets(size):
    """Return the number of distinct undirected graphlets of a given size.

    Only sizes 2-5 are supported; any other size raises ValueError instead
    of silently returning None (the original fell through to None).
    """
    counts = {2: 2, 3: 4, 4: 11, 5: 34}
    if size not in counts:
        raise ValueError('unsupported graphlet size: %r' % (size,))
    return counts[size]


def generate_graphlets(size, data_dir='/Users/Syzygy/workspace/Stage_Shanghai/'):
    """Load the reference graphlet adjacency matrices of a given size from CSV.

    ``data_dir`` generalizes the previously hard-coded location; the default
    preserves the original behavior.  Returns an array of shape (4, 3, 3) for
    size 3 or (11, 4, 4) for size 4.
    """
    if size == 3:
        return np.genfromtxt(data_dir + '3graphlets.csv',
                             delimiter=',').reshape(4, 3, 3)
    elif size == 4:
        return np.genfromtxt(data_dir + '4graphlets.csv',
                             delimiter=',').reshape(11, 4, 4)


def is_3star(adj_mat):
    """Check whether a size-4 graphlet (diagonal filled with 1s) is a 3-star.

    A 3-star has 3 edges (total sum 2*3 + 4 diagonal ones = 10) and a hub
    row summing to 4 (degree 3 plus its diagonal 1).
    """
    return adj_mat.sum() == 10 and 4 in [row.sum() for row in adj_mat]


def _4_graphlet_contains_3star(adj_mat):
    """Check whether a size-4 graphlet has a degree-3 node (a 3-star hub)."""
    return 4 in [row.sum() for row in adj_mat]


def compare_graphlets(am1, am2):
    """Decide whether two graphlet adjacency matrices (size 3 or 4) are
    isomorphic.

    The inputs are copied before their diagonals are filled with 1s, so
    callers' matrices (including the stored template graphlet arrays) are
    never mutated -- the original filled the diagonals in place.
    """
    adj_mat1 = np.array(am1)  # np.array copies; fill_diagonal works in place
    adj_mat2 = np.array(am2)
    np.fill_diagonal(adj_mat1, 1)
    np.fill_diagonal(adj_mat2, 1)
    k = adj_mat1.shape[0]
    if k == 3:
        # For 3 nodes the edge count alone determines isomorphism.
        return adj_mat1.sum() == adj_mat2.sum()
    # Size 4: with the diagonal filled, total sum = 4 + 2 * (#edges).
    if adj_mat1.sum() != adj_mat2.sum():
        return False
    total = adj_mat1.sum()
    if total in (4, 6, 14, 16):
        # 0, 1, 5 or 6 edges: a single isomorphism class each.
        return True
    sums1 = [row.sum() for row in adj_mat1]
    sums2 = [row.sum() for row in adj_mat2]
    if total == 8:
        # 2 edges: two disjoint edges vs. a 2-path.  Only a 2-path has a
        # middle node with row sum 3.  (The original wrote
        # ``3.0 in s1 == 3.0 in s2``, which Python chains into
        # ``(3.0 in s1) and (s1 == 3.0) and (3.0 in s2)`` -- always False.
        # Fixed with explicit parentheses.)
        return (3 in sums1) == (3 in sums2)
    if total == 10:
        # 3 edges: 3-star, 3-path, or triangle plus an isolated node.
        if is_3star(adj_mat1) != is_3star(adj_mat2):
            return False
        if is_3star(adj_mat1):
            return True
        # Only the triangle case leaves an isolated node (row sum 1).
        return (1 in sums1) == (1 in sums2)
    if total == 12:
        # 4 edges: a simple cycle vs. something containing a 3-star.
        return (_4_graphlet_contains_3star(adj_mat1)
                == _4_graphlet_contains_3star(adj_mat2))
    return False


def graphlet_index(adj_mat, graphlet_array):
    """Return the index of the template graphlet isomorphic to ``adj_mat``.

    Returns -1 when no template matches; note that callers would then
    silently increment the LAST histogram bin, so complete template arrays
    are assumed.
    """
    for i, template in enumerate(graphlet_array):
        if compare_graphlets(adj_mat, template):
            return i
    return -1


def count_graphlets(adj_mat, size, graphlet_array):
    """Exhaustively count induced graphlets of ``size`` in a sparse
    adjacency matrix.

    Returns the histogram normalized to sum to 1, of shape
    (1, number_of_graphlets(size)).
    """
    adj_mat = adj_mat.todense()
    res = np.zeros((1, number_of_graphlets(size)))
    for subset in itertools.combinations(range(adj_mat.shape[0]), size):
        graphlet = (adj_mat[subset, :])[:, subset]
        res[0][graphlet_index(graphlet, graphlet_array)] += 1
    return res / res.sum()


def random_combination(iterable, r):
    """Random selection from itertools.combinations(iterable, r)."""
    pool = tuple(iterable)
    indices = sorted(random.sample(range(len(pool)), r))
    return tuple(pool[i] for i in indices)


def count_graphlets_sampling(adj_mat, size, graphlet_array, s):
    """Estimate the graphlet histogram by sampling ``s`` random node subsets.

    Returns raw (un-normalized) counts of shape
    (1, number_of_graphlets(size)); callers normalize as needed.
    """
    adj_mat = adj_mat.todense()
    res = np.zeros((1, number_of_graphlets(size)))
    for _ in range(s):
        # Random nodes that form the candidate graphlet.
        subset = random_combination(range(adj_mat.shape[0]), size)
        graphlet = (adj_mat[subset, :])[:, subset]
        # Increment the bin of the matching template graphlet.
        res[0][graphlet_index(graphlet, graphlet_array)] += 1
    return res
def computekgraphlet(k, list_graphs, s):
    """Compute the sampled k-graphlet kernel matrix, with ``s`` samples.

    NOTE(review): graph files are read from the hard-coded '/your_dir/'
    prefix (the sibling compute_all_connected_34graphlet uses a relative
    'your_dir/' prefix) -- adjust before use.
    """
    d1 = np.zeros((len(list_graphs), number_of_graphlets(k)))
    graphlet_array = generate_graphlets(k)
    for i, commune in enumerate(list_graphs):
        graph = nx.read_gexf('/your_dir/' + commune + '.gexf',
                             node_type=None, relabel=True, version='1.1draft')
        graph = nx.adjacency_matrix(graph, weight=None)
        d1[i] = count_graphlets_sampling(graph, k, graphlet_array, s)
        # Normalize by the number of sampled graphlets.
        d1[i] = d1[i] / sum(d1[i])
        if i % 10 == 0:
            print(i, 'graphs done')
    return d1.dot(d1.T)


def compute34graphlet(list_graphs, s):
    """Compute the sampled 3,4-graphlet kernel matrix (``s`` samples per size)."""
    d1 = np.zeros((len(list_graphs),
                   number_of_graphlets(3) + number_of_graphlets(4)))
    graphlet_array3 = generate_graphlets(3)
    graphlet_array4 = generate_graphlets(4)
    for i, commune in enumerate(list_graphs):
        graph = nx.read_gexf('/your_dir/' + commune + '.gexf',
                             node_type=None, relabel=True, version='1.1draft')
        graph = nx.adjacency_matrix(graph, weight=None)
        d1[i] = np.concatenate(
            (count_graphlets_sampling(graph, 3, graphlet_array3, s)[0],
             count_graphlets_sampling(graph, 4, graphlet_array4, s)[0]))
        # Normalize by the total number of sampled graphlets.
        d1[i] = d1[i] / sum(d1[i])
        if i % 100 == 0:
            print(i, 'graphs done')
    return d1.dot(d1.T)


def findPaths(G, u, n):
    """Return all simple paths of length ``n`` (in edges) starting at ``u``."""
    if n == 0:
        return [[u]]
    return [[u] + path
            for neighbor in G.neighbors(u)
            for path in findPaths(G, neighbor, n - 1)
            if u not in path]


def count_all_connected_3graphlets(graph):
    """Count all connected 3-graphlets (triangle, 2-path) in ``graph``.

    Each triangle is enumerated 6 times (3 start nodes x 2 directions) and
    each open 2-path twice, hence the final divisions.
    """
    res = [0, 0]
    graph = nx.convert_node_labels_to_integers(graph)
    A = nx.adjacency_matrix(graph, weight=None)
    for node in graph.nodes():
        for path in findPaths(graph, node, 2):
            if A[path[0], path[2]] == 1:
                res[0] += 1  # closing edge present: triangle
            else:
                res[1] += 1  # open 2-path
    res[0] /= 6
    res[1] /= 2
    return res


def count_all_connected_4graphlets(graph):
    """Count the 6 connected 4-graphlet types in ``graph``.

    Length-3 paths enumerate every connected 4-graphlet except the 3-star,
    which is counted separately from each degree>=3 hub.  The weights ``w``
    compensate the enumeration multiplicity of each type.
    """
    res = [0] * 6
    graph = nx.convert_node_labels_to_integers(graph)
    A = nx.adjacency_matrix(graph, weight=None)
    for node in graph.nodes():
        for path in findPaths(graph, node, 3):
            # Number of chord edges on top of the 3 path edges.
            aux = (A[path[0], path[2]] + A[path[0], path[3]]
                   + A[path[1], path[3]])
            if aux == 3:
                res[0] += 1          # 6 edges: complete graphlet (type 1)
            elif aux == 2:
                res[1] += 1          # 5 edges: type 2
            elif aux == 1:
                # 4 edges: type 3 or type 5, split on the closing chord.
                if A[path[0], path[3]] == 1:
                    res[4] += 1      # 4-cycle (type 5)
                else:
                    res[2] += 1      # type 3
            else:
                res[5] += 1          # 3 edges: simple 3-path (type 6)
        # 3-stars (counted in res[3], weight 1 -- the original comment
        # mislabeled them as "type 6") are unreachable as length-3 paths;
        # count them from each hub of degree >= 3.
        if graph.degree(node) > 2:
            for trio in itertools.combinations(graph.neighbors(node), 3):
                if (A[trio[0], trio[1]] == 0
                        and A[trio[1], trio[2]] == 0
                        and A[trio[2], trio[0]] == 0):
                    res[3] += 1
    # Multiplicity corrections per type.
    w = [1 / 24, 1 / 12, 1 / 4, 1, 1 / 8, 1 / 2]
    return [a * b for a, b in zip(res, w)]


def count_all_connected_5graphlets(graph):
    """Count the 21 connected 5-graphlet types in ``graph``.

    Length-4 paths enumerate most types, discriminated by the number of
    extra chords (``aux``) and the degree sequence of the induced subgraph;
    star-like types 19 and 21 are counted separately from degree>=4 hubs.
    The weights ``w`` compensate each type's enumeration multiplicity.
    """
    res = [0] * 21
    graph = nx.convert_node_labels_to_integers(graph)
    A = nx.adjacency_matrix(graph, weight=None)
    for node in graph.nodes():
        for path in findPaths(graph, node, 4):
            sub = graph.subgraph(path)
            # Chords in addition to the 4 path edges (at most 6).
            aux = (A[path[0], path[2]] + A[path[0], path[3]]
                   + A[path[0], path[4]] + A[path[1], path[3]]
                   + A[path[1], path[4]] + A[path[2], path[4]])
            aux1 = [sub.degree(p) for p in path]  # degrees in path order
            degs = sorted(aux1)
            if aux == 6:
                res[0] += 1              # 10 edges: complete (type 1)
            elif aux == 5:
                res[1] += 1              # 9 edges: type 2
            elif aux == 4:
                # 8 edges: type 3 or 4, split on the minimum degree.
                if 2 in degs:
                    res[3] += 1          # type 4
                else:
                    res[2] += 1          # type 3
            elif aux == 3:
                # 7 edges: type 5, 6, 9 or 14.
                if degs[0] == 1:
                    res[8] += 1          # type 9
                elif degs[1] == 3:
                    res[4] += 1          # type 5
                elif degs[2] == 2:
                    res[13] += 1         # type 14
                else:
                    res[5] += 1          # type 6
            elif aux == 2:
                if degs[0] == 1:
                    if degs[2] == 2:
                        res[15] += 1     # type 16
                    else:
                        res[9] += 1      # type 10
                elif degs[3] == 2:
                    res[10] += 1         # type 11
                else:
                    # Two degree-3 nodes: adjacent -> type 7, else type 15.
                    # (Fixed scalar indexing; the original indexed with a
                    # one-element list, comparing an array against 1.)
                    ind = np.where(np.array(aux1) == 3)[0]
                    if A[path[ind[0]], path[ind[1]]] == 1:
                        res[6] += 1      # type 7
                    else:
                        res[14] += 1     # type 15
            elif aux == 1:
                if degs[0] == 2:
                    res[7] += 1          # type 8
                elif degs[1] == 1:
                    res[17] += 1         # type 18
                else:
                    arr = np.array(aux1)
                    ind1 = np.where(arr == 1)[0]
                    ind3 = np.where(arr == 3)[0]
                    # Degree-1 node adjacent to a degree-3 node -> type 17.
                    if A[path[ind1[0]], path[ind3[0]]] == 1:
                        res[16] += 1     # type 17
                    else:
                        res[11] += 1     # type 12
            else:
                res[12] += 1             # no chords: simple 4-path (type 13)

        if graph.degree(node) > 3:
            for quad in itertools.combinations(graph.neighbors(node), 4):
                # NOTE(review): only 4 of the 6 neighbor pairs are tested
                # here (the cyclic ones), as in the original -- confirm this
                # matches the intended type-19/21 definitions.
                a = [A[quad[0], quad[1]], A[quad[1], quad[2]],
                     A[quad[2], quad[3]], A[quad[3], quad[0]]]
                if sum(a) == 0:
                    res[20] += 1         # type 21 (4-star)
                elif sum(a) == 1:
                    res[18] += 1         # type 19

    w = [1 / 120, 1 / 72, 1 / 48, 1 / 36, 1 / 28, 1 / 20, 1 / 14, 1 / 10,
         1 / 12, 1 / 8, 1 / 8, 1 / 4, 1 / 2, 1 / 12, 1 / 12, 1 / 4, 1 / 4,
         1 / 2, 1, 1 / 2, 1]
    return [a * b for a, b in zip(res, w)]


def compute_all_connected_34graphlet(list_graphs):
    """Compute the all-connected 3,4-graphlet kernel matrix.

    Each graph's feature vector concatenates the 2 connected 3-graphlet and
    6 connected 4-graphlet counts, normalized to sum to 1.
    """
    start_time_all = time.time()
    d1 = np.zeros((len(list_graphs), 2 + 6))
    for i, commune in enumerate(list_graphs):
        graph = nx.read_gexf('your_dir/' + commune + '.gexf',
                             node_type=None, relabel=True, version='1.1draft')
        d1[i] = np.concatenate((count_all_connected_3graphlets(graph),
                                count_all_connected_4graphlets(graph)))
        # Normalize by the total number of graphlets.
        d1[i] = d1[i] / sum(d1[i])
        if i % 100 == 0:
            print(i, 'graphs done')
            print("--- %s seconds of computing, still running... ---"
                  % (time.time() - start_time_all))
    print("--- %s seconds (entire kernel matrix computation time) ---"
          % (time.time() - start_time_all))
    return d1.dot(d1.T)
---" 338 % (time.time() - start_time_all))339 print("--- %s seconds (entire kernel matrix computation time) ---" 340 % (time.time() - start_time_all)) 341 return d1.dot(d1.T)342def compute_all_connected_34graphlet_2_categories_plus_predict(list_graphs_train_1, 343 list_graphs_train_2, 344 list_graphs_test_1, 345 list_graphs_test_2):346 """For binary classification"""347 start_time_all=time.time()348 size_train=len(list_graphs_train_1)+len(list_graphs_train_2)349 size_test=len(list_graphs_test_1)+len(list_graphs_test_2)350 d_train_1 = np.zeros((len(list_graphs_train_1), 2+6))351 d_train_2 = np.zeros((len(list_graphs_train_2), 2+6))352 d_test_1 = np.zeros((len(list_graphs_test_1), 2+6))353 d_test_2 = np.zeros((len(list_graphs_test_2), 2+6))354 #w=np.load('inv_freq.npy')355 w=np.ones(8)356 #w[1]=w[7]=0357 358 # for train359 360 for i, commune in enumerate(list_graphs_train_1):361 #print(commune)362 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 363 node_type=None, relabel=True, version='1.1draft')364 d_train_1[i] = np.concatenate((count_all_connected_3graphlets(graph),365 count_all_connected_4graphlets(graph)))366 #normalize by the number of graphlets367 d_train_1[i]=d_train_1[i]/sum(d_train_1[i])368 #w = [100,3,1000, 1000, 100, 10, 50, 2]369 d_train_1[i]=[a*b for a,b in zip(d_train_1[i],w)]370 if i%100==0:371 print(i,'graphs done')372 print("--- %s seconds of computing (train 1 phase) ---" 373 % (time.time() - start_time_all))374 375 for i, commune in enumerate(list_graphs_train_2):376 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 377 node_type=None, relabel=True, version='1.1draft')378 d_train_2[i] = np.concatenate((count_all_connected_3graphlets(graph),379 count_all_connected_4graphlets(graph)))380 #normalize by the number of graphlets381 d_train_2[i]=d_train_2[i]/sum(d_train_2[i])382 #w = [100,3,1000, 1000, 100, 10, 50, 2]383 d_train_2[i]=[a*b for a,b in zip(d_train_2[i],w)]384 if i%100==0:385 print(i,'graphs done')386 print("--- %s seconds of computing 
(train 2 phase) ---" 387 % (time.time() - start_time_all))388 389 d_train=np.concatenate([d_train_1, d_train_2])390 391 ker=d_train.dot(d_train.T)392 #see convenient tools393 ker_norm=normalize_kernel_matrix(ker)394 print("--- %s seconds (entire train kernel matrix computation time) ---" 395 % (time.time() - start_time_all)) 396 397 # for test398 399 for i, commune in enumerate(list_graphs_test_1):400 #print(commune)401 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 402 node_type=None, relabel=True, version='1.1draft')403 d_test_1[i] = np.concatenate((count_all_connected_3graphlets(graph),404 count_all_connected_4graphlets(graph)))405 #normalize by the number of graphlets406 d_test_1[i]=d_test_1[i]/sum(d_test_1[i])407 #w = [100,3,1000, 1000, 100, 10, 50, 2]408 d_test_1[i]=[a*b for a,b in zip(d_test_1[i],w)]409 if i%100==0:410 print(i,'graphs done')411 print("--- %s seconds of computing (test 1 phase) ---" 412 % (time.time() - start_time_all))413 414 for i, commune in enumerate(list_graphs_test_2):415 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 416 node_type=None, relabel=True, version='1.1draft')417 d_test_2[i] = np.concatenate((count_all_connected_3graphlets(graph),418 count_all_connected_4graphlets(graph)))419 #normalize by the number of graphlets420 d_test_2[i]=d_test_2[i]/sum(d_test_2[i])421 #w = [100,3,1000, 1000, 100, 10, 50, 2]422 d_test_2[i]=[a*b for a,b in zip(d_test_2[i],w)]423 if i%100==0:424 print(i,'graphs done')425 print("--- %s seconds of computing (test 2 phase) ---" 426 % (time.time() - start_time_all))427 428 d_test=np.concatenate([d_test_1, d_test_2])429 430 test=d_test.dot(d_train.T)431 432 aux=d_test.dot(d_test.T)433 test_norm=np.zeros((size_test,size_train))434 for i in range(size_test):435 for j in range(size_train):436 test_norm[i,j]=test[i,j]/math.sqrt(aux[i,i]*ker[j,j])437 438 return ker, ker_norm, test, test_norm439def compute_all_connected_345graphlet_2_categories_plus_predict(list_graphs_train_1, 440 list_graphs_train_2, 441 
list_graphs_test_1, 442 list_graphs_test_2):443 444 """For binary classification"""445 start_time_all=time.time()446 size_train=len(list_graphs_train_1)+len(list_graphs_train_2)447 size_test=len(list_graphs_test_1)+len(list_graphs_test_2)448 d_train_1 = np.zeros((len(list_graphs_train_1), 2+6+21))449 d_train_2 = np.zeros((len(list_graphs_train_2), 2+6+21))450 d_test_1 = np.zeros((len(list_graphs_test_1), 2+6+21))451 d_test_2 = np.zeros((len(list_graphs_test_2), 2+6+21))452 w=np.ones(29)453 #w=np.load('inv_freq_345.npy')454 #w[1]=w[7]=w[20]=0455 list_delete=[]456 457 # for train458 459 for i, commune in enumerate(list_graphs_train_1):460 461 try:462 463 #print(commune)464 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 465 node_type=None, relabel=True, version='1.1draft')466 d_train_1[i] = np.concatenate((count_all_connected_3graphlets(graph),467 count_all_connected_4graphlets(graph),468 count_all_connected_5graphlets(graph)))469 #normalize by the number of graphlets470 d_train_1[i]=d_train_1[i]/sum(d_train_1[i])471 d_train_1[i]=[a*b for a,b in zip(d_train_1[i],w)]472 if i%100==0:473 print(i,'graphs done')474 print("--- %s seconds of computing (train 1 phase) ---" 475 % (time.time() - start_time_all))476 477 except IndexError:478 print(commune, 'does not work')479 list_delete.append(i)480 481 for i in list_delete:482 d_train_1 = np.delete(d_train_1, (i), axis=0)483 484 list_delete=[]485 486 for i, commune in enumerate(list_graphs_train_2):487 488 try:489 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 490 node_type=None, relabel=True, version='1.1draft')491 d_train_2[i] = np.concatenate((count_all_connected_3graphlets(graph),492 count_all_connected_4graphlets(graph),493 count_all_connected_5graphlets(graph)))494 #normalize by the number of graphlets495 d_train_2[i]=d_train_2[i]/sum(d_train_2[i])496 d_train_2[i]=[a*b for a,b in zip(d_train_2[i],w)]497 if i%100==0:498 print(i,'graphs done')499 print("--- %s seconds of computing (train 2 phase) ---" 500 % 
(time.time() - start_time_all))501 502 except IndexError:503 print(commune, 'does not work')504 list_delete.append(i)505 506 for i in list_delete:507 d_train_2 = np.delete(d_train_2, (i), axis=0)508 509 list_delete=[]510 511 print('number of first label graphs in train :', len(d_train_1))512 print('number of second label graphs in train :', len(d_train_2))513 514 size_train=len(d_train_1)+len(d_train_2)515 516 d_train=np.concatenate([d_train_1, d_train_2])517 518 """The next comment line can be extremely useful !"""519 520 #d_train=(d_train-mean(d_train))/std(d_train)521 522 ker=d_train.dot(d_train.T)523 #see convenient tools524 ker_norm=normalize_kernel_matrix(ker)525 print("--- %s seconds (entire train kernel matrix computation time) ---" 526 % (time.time() - start_time_all)) 527 528 # for test529 530 for i, commune in enumerate(list_graphs_test_1):531 532 try:533 534 #print(commune)535 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 536 node_type=None, relabel=True, version='1.1draft')537 d_test_1[i] = np.concatenate((count_all_connected_3graphlets(graph),538 count_all_connected_4graphlets(graph),539 count_all_connected_5graphlets(graph)))540 #normalize by the number of graphlets541 d_test_1[i]=d_test_1[i]/sum(d_test_1[i])542 d_test_1[i]=[a*b for a,b in zip(d_test_1[i],w)]543 if i%100==0:544 print(i,'graphs done')545 print("--- %s seconds of computing (test 1 phase) ---" 546 % (time.time() - start_time_all))547 548 except IndexError:549 print(commune, 'does not work')550 list_delete.append(i)551 552 for i in list_delete:553 d_test_1 = np.delete(d_test_1, (i), axis=0)554 555 list_delete=[]556 557 for i, commune in enumerate(list_graphs_test_2):558 559 try:560 561 graph=nx.read_gexf('/your_dir/'+commune+'.gexf', 562 node_type=None, relabel=True, version='1.1draft')563 d_test_2[i] = np.concatenate((count_all_connected_3graphlets(graph),564 count_all_connected_4graphlets(graph),565 count_all_connected_5graphlets(graph)))566 #normalize by the number of graphlets567 
d_test_2[i]=d_test_2[i]/sum(d_test_2[i])568 d_test_2[i]=[a*b for a,b in zip(d_test_2[i],w)]569 if i%100==0:570 print(i,'graphs done')571 print("--- %s seconds of computing (test 2 phase) ---" 572 % (time.time() - start_time_all))573 574 except IndexError:575 print(commune, 'does not work')576 list_delete.append(i)577 578 for i in list_delete:579 d_test_2 = np.delete(d_test_2, (i), axis=0)580 581 list_delete=[]582 583 print('number of first label graphs in test :', len(d_test_1))584 print('number of second label graphs in test :', len(d_test_2))585 586 size_test=len(d_test_1)+len(d_test_2)587 588 d_test=np.concatenate([d_test_1, d_test_2])589 590 """The next comment line can be extremely useful !"""591 592 #d_test=(d_test-mean(d_test))/std(d_test)593 594 test=d_test.dot(d_train.T)595 596 aux=d_test.dot(d_test.T)597 test_norm=np.zeros((size_test,size_train))598 for i in range(size_test):599 for j in range(size_train):600 test_norm[i,j]=test[i,j]/math.sqrt(aux[i,i]*ker[j,j])601 ...
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.
You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.
Get 100 minutes of automation test minutes FREE!!