waveform.py
Source:waveform.py
import json
import numpy as np
import pylab as pl

def waveform(f, A, b, t0, tend, d_end_t=None, gamma=0.0, phi0=0.0,
             N=1000, verbose=False, seed_number=None, project_name=None):
    """
    METHOD
    ======
    Takes input parameters of a wave and the strength and duration of
    noise, and returns the data.

    PARAMETERS
    ==========
    f : (Float) Frequency of the signal
    A : (Float) Amplitude of the signal
    b : (Float) Amplitude of the noise
    t0 : (Float) Time stamp of the beginning of the signal
    tend : (Float) Time stamp of the end of the signal
    d_end_t : (Float) Time stamp of the end of the data. Default = None
    gamma : (Float) Attenuation factor of the signal. Default = 0.0
    phi0 : (Float) Initial phase of the signal. Default = 0.0
    N : (Int) Total number of time stamps. Default = 1000
    verbose : (Bool) Set True to get diagnostic stdout. Default = False
    seed_number : (Int) Number used to seed the noise generator. Default = None
    project_name : (String) Name given to the png and json files created. Default = None

    OUTPUT
    ======
    A tuple of a float and two numpy arrays (dt, T_full, d), where dt is
    the resolution of the time series, T_full is the full list of time stamps
    of the data starting at 0 and ending at d_end_t, and d is the
    corresponding displacement values in the data.
    """
    # Conditional for noise duration.
    # If the supplied data-end time is missing or too small, extend it.
    if verbose:
        print("Making sure that the stretch of data is longer than the signal")
    assert t0 > 0, "Signal should start later than t=0"
    if (d_end_t is None) or (tend > d_end_t - 10):
        d_end_t = tend + 10
        if verbose:
            print("data end time is set at {}".format(d_end_t))

    T = np.linspace(t0, tend, N)    # Time stamps of the signal
    dt = np.mean(np.diff(T))        # Resolution of the time series
    if verbose:
        print("Mean value of timing resolution = {}".format(dt))

    t = t0          # Initialize the time series at the start time
    t_minus = []    # To populate time stamps prior to the signal start
    while t >= 0:   # Make sure we reach all the way back to zero
        t = t - dt
        t_minus.append(t)                 # Time stamps from (t0 - dt) down to 0
    t_minus = np.array(t_minus)[::-1]     # Reverse so they run from 0 to t0
    t_minus = t_minus[t_minus >= 0]       # Eliminate numbers less than 0

    t_plus = np.arange(tend + dt, d_end_t, dt)   # Time stamps from (tend + dt) to d_end_t, in steps of dt

    T_full = np.hstack((t_minus, T, t_plus))     # Connect the time stamps

    dev = np.std(np.diff(T_full))    # Standard deviation of the dt's of T_full
    if verbose:
        print("Standard deviation of the resolution of time = {}".format(dev))

    if verbose:
        print("Creating time series of the signal...")
    w = 2 * np.pi * f
    y = A * np.sin(w * T + phi0) * np.exp(-gamma * (T - t0))

    # Padding of signal data
    if verbose:
        print("Creating the zero-padded signal...")
    y_minus = np.zeros_like(t_minus)
    y_plus = np.zeros_like(t_plus)
    y_full = np.hstack((y_minus, y, y_plus))

    if verbose:
        print("Creating random noise...")
    if seed_number is None:
        seed_number = 1
    np.random.seed(seed_number)
    noise = -b + 2 * b * np.random.random(len(T_full))   # Uniform noise in [-b, b)

    if verbose:
        print("Creating final data")
    d = noise + y_full   # Complete data

    # Graphing
    pl.rcParams.update({'font.size': 18})
    pl.figure(figsize=(20, 15))
    pl.plot(T_full, noise, color='green', linewidth=2)   # Noise
    pl.plot(T_full, d, color='black', linewidth=2)       # Combined
    pl.plot(T, y, color='orange', linewidth=2)           # Signal
    pl.xlabel("Time")
    pl.ylabel("Displacement")
    text = "f={}; A={}; b={}; t0={}; tend={}; gamma={}; N={}"
    pl.title(text.format(f, A, b, t0, tend, gamma, N))
    # if project_name is None:
    #     project_name = "test"
    # pl.savefig("figures/{}-waveform_plot-f_{}-A_{}-b_{}-t0_{}-tend_{}-gamma_{}-seed_{}.png".format(
    #     project_name, f, A, b, t0, tend, gamma, seed_number))

    # data = {"dt": dt, "t_full": T_full, "d": d}
    # outputfile = "data/{}-waveform_data-f_{}-A_{}-b_{}-t0_{}-tend_{}-gamma_{}-seed_{}.json".format(
    #     project_name, f, A, b, t0, tend, gamma, seed_number)
    # with open(outputfile, "w") as outfile:
    #     json.dump(data, outfile, indent=2, sort_keys=True)
    ...
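A minimal usage sketch follows (hypothetical parameter values; it assumes the truncated tail of the function returns the (dt, T_full, d) tuple that the docstring documents):

# Hypothetical example: a 2 Hz, unit-amplitude signal between t=5 and t=20,
# lightly damped and buried in uniform noise of amplitude 0.5.
from waveform import waveform

dt, T_full, d = waveform(f=2.0, A=1.0, b=0.5, t0=5.0, tend=20.0,
                         gamma=0.1, seed_number=42, verbose=True)
print("time resolution:", dt)
print("samples in the padded series:", len(T_full))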
calcSimilarity.py
Source:calcSimilarity.py
'''
File name: calcSimilarity.py
Author: Ningshan Zhang, Zheyuan Xie
Date created: 2018-12-19
'''
import cv2
import numpy as np

def calcSimilarity(landmarks1, landmarks2):
    # Estimate a similarity (rigid) transform from landmarks1 to landmarks2
    # and return the summed squared residual after applying it.
    T = cv2.estimateRigidTransform(landmarks1, landmarks2, False)
    if T is None:
        return np.inf
    T_full = np.vstack((T, np.array([0, 0, 1])))
    landmarks1_full = np.vstack((landmarks1.T, np.ones((1, landmarks1.shape[0]))))
    landmarks1_trans = np.dot(T_full, landmarks1_full)
    landmarks1_trans = landmarks1_trans[0:2, :].T
    dist = np.sum(np.sum((landmarks1_trans - landmarks2) ** 2, axis=1))
    return dist

# match a single source face for all target frames
def findMinDistFace_static(landmarks1, landmarks2):
    faceind = 0
    mindist = np.inf
    for i in range(len(landmarks1)):
        if landmarks1[i] is None:
            continue
        dist = 0
        for j in range(len(landmarks2)):
            if landmarks2[j] is None:
                continue
            T = cv2.estimateRigidTransform(landmarks1[i], landmarks2[j], False)
            if T is None:
                continue
            T_full = np.vstack((T, np.array([0, 0, 1])))
            landmarks1_full = np.vstack((landmarks1[i].T, np.ones((1, landmarks1[i].shape[0]))))
            landmarks1_trans = np.dot(T_full, landmarks1_full)
            landmarks1_trans = landmarks1_trans[0:2, :].T
            dist = dist + calcSimilarity(landmarks1_trans, landmarks2[j])
        if dist < mindist:
            faceind = i
            mindist = dist
    return (faceind * np.ones((len(landmarks2),))).astype(int)

# match a source face for each target frame
def findMinDistFace(landmarks1, landmarks2):
    faceind = np.zeros((len(landmarks2),))
    for i in range(len(landmarks2)):
        if landmarks2[i] is None:
            continue
        mindist = np.inf
        for j in range(len(landmarks1)):
            if landmarks1[j] is None:
                continue
            T = cv2.estimateRigidTransform(landmarks1[j], landmarks2[i], False)
            if T is None:
                continue
            T_full = np.vstack((T, np.array([0, 0, 1])))
            landmarks1_full = np.vstack((landmarks1[j].T, np.ones((1, landmarks1[j].shape[0]))))
            landmarks1_trans = np.dot(T_full, landmarks1_full)
            landmarks1_trans = landmarks1_trans[0:2, :].T
            dist = calcSimilarity(landmarks1_trans, landmarks2[i])
            if dist < mindist:
                faceind[i] = j
                mindist = dist
    return faceind.astype(int)

if __name__ == "__main__":
    from loader import loadlandmarks_facepp, loadvideo
    import time
    easy1 = 'Datasets/Easy/FrankUnderwood.mp4'
    easy2 = 'Datasets/Easy/MrRobot.mp4'
    lm1 = loadlandmarks_facepp(easy2)
    lm2 = loadlandmarks_facepp(easy1)
    video2 = loadvideo(easy1)
    print(len(lm1))
    t0 = time.time()
    ind = findMinDistFace(lm1, lm2)
    ind_s = findMinDistFace_static(lm1, lm2)
    t1 = time.time()
    print(t1 - t0)
    T = cv2.estimateRigidTransform(lm1[ind[0]], lm2[0], False)
    T_full = np.vstack((T, np.array([0, 0, 1])))
    landmarks1_full = np.vstack((lm1[ind[0]].T, np.ones((1, lm1[ind[0]].shape[0]))))
    landmarks1_trans = np.dot(T_full, landmarks1_full)
    landmarks1_trans = landmarks1_trans[0:2, :].T
    for groups in landmarks1_trans.astype(int):
        cv2.circle(video2[0], (groups[0], groups[1]), 1, (0, 255, 255), 2)
    for groups in lm2[0].astype(int):
        cv2.circle(video2[0], (groups[0], groups[1]), 1, (0, 0, 255), 2)
    for i in range(83):
        cv2.line(video2[0],
                 (lm2[0].astype(int)[i, 0], lm2[0].astype(int)[i, 1]),
                 (landmarks1_trans.astype(int)[i, 0], landmarks1_trans.astype(int)[i, 1]),
                 (0, 255, 255), 2)
    cv2.imshow('frame', video2[0])
    ...
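Note that cv2.estimateRigidTransform is no longer available in OpenCV 4.x; cv2.estimateAffinePartial2D estimates the same 4-degree-of-freedom similarity transform. The sketch below is an adaptation, not part of the project: it redoes the residual computation with the newer call and checks it on synthetic landmarks instead of the Face++ data.

import cv2
import numpy as np

def calc_similarity_v4(landmarks1, landmarks2):
    # Same residual computation as calcSimilarity(), but with the OpenCV 4 API.
    T, _ = cv2.estimateAffinePartial2D(landmarks1, landmarks2)
    if T is None:
        return np.inf
    T_full = np.vstack((T, np.array([0, 0, 1])))
    pts = np.vstack((landmarks1.T, np.ones((1, landmarks1.shape[0]))))
    trans = np.dot(T_full, pts)[0:2, :].T
    return np.sum((trans - landmarks2) ** 2)

# Synthetic check: the second landmark set is the first rotated by 10 degrees
# and shifted, so the residual should be close to zero.
rng = np.random.default_rng(0)
lm_a = rng.uniform(0, 100, size=(83, 2)).astype(np.float32)
theta = np.deg2rad(10.0)
R = np.array([[np.cos(theta), -np.sin(theta)],
              [np.sin(theta),  np.cos(theta)]], dtype=np.float32)
lm_b = lm_a @ R.T + np.array([5.0, -3.0], dtype=np.float32)
print(calc_similarity_v4(lm_a, lm_b))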
7_4_hints.py
Source:7_4_hints.py
# 7-4 prep
# Assumes `t` is the Titanic DataFrame loaded earlier in the notebook.
import pandas as pd
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix, plot_roc_curve, plot_precision_recall_curve
# Note: the plot_* helpers require scikit-learn < 1.2 (newer versions use
# RocCurveDisplay / PrecisionRecallDisplay instead).

dummies = pd.get_dummies(t[['sex', 'pclass']])
dummies.head()

t_full = pd.concat([t, dummies], axis=1)
t_full.head()

t_full = t_full.dropna(subset=['age', 'sex_male', 'fare', 'pclass_2nd', 'pclass_3rd', 'survived'])
t_full.shape

X = t_full[['age', 'sex_male', 'fare', 'pclass_2nd', 'pclass_3rd']]
y = t_full['survived']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=777)

sk_logit = LogisticRegression(penalty='none').fit(X_train, y_train)
sk_logit.coef_

prob_pred = sk_logit.predict_proba(X_test)[:, 1]  # column 1 = P(survived = 1)
confusion_matrix(y_test, prob_pred > 0.45)
sns.heatmap(confusion_matrix(y_test, prob_pred > 0.4),
            annot=True, fmt='d', cmap='Blues');

plot_roc_curve(sk_logit, X_test, y_test);
# FPR = 1 - specificity = FP / (FP + TN) = FP / condition negative
# TPR = sensitivity     = TP / (TP + FN) = TP / condition positive

plot_precision_recall_curve(sk_logit, X_test, y_test);
# recall = TPR = sensitivity = TP / (TP + FN) = TP / condition positive
# precision = TP / (TP + FP) = ...
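To make the rate definitions in those trailing notes concrete, here is a small standalone sketch (toy labels, not the Titanic data) that pulls TN/FP/FN/TP out of sklearn's confusion matrix and recomputes TPR, FPR, and precision by hand:

import numpy as np
from sklearn.metrics import confusion_matrix

# Toy ground truth and predictions (positive class = 1)
y_true = np.array([0, 0, 1, 1, 1, 0, 1, 0, 1, 0])
y_pred = np.array([0, 1, 1, 1, 0, 0, 1, 0, 1, 1])

# For binary labels, sklearn orders the matrix as [[TN, FP], [FN, TP]]
tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()

tpr = tp / (tp + fn)        # recall / sensitivity = TP / condition positive
fpr = fp / (fp + tn)        # 1 - specificity      = FP / condition negative
precision = tp / (tp + fp)  # TP / predicted positive

print(f"TPR/recall={tpr:.2f}  FPR={fpr:.2f}  precision={precision:.2f}")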