Best Python code snippets using Hypothesis
constructNetWork_RNN_Sib.py
Source:constructNetWork_RNN_Sib.py
import sys
sys.path.append('../nn')
sys.path.append('../')
import Layers as Lay, Connections as Con, Activation
##############################
# hyperparam


# NOTE(review): reconstructed from a garbled extraction (original newlines
# were collapsed and per-file line numbers fused into the text); tokens kept
# as in the source, only formatting and comments added. Python 2 code
# (uses `xrange`); depends on the project-local modules Layers/Connections/
# Activation, whose `layer`/`connection` signatures are not visible here.
class info:
    # Simple tree-node record: a parent index/reference plus a list of children.
    parent = None
    childrenList = None

    def __init__(self, parent=None):
        self.parent = parent
        self.childrenList = []  # fresh list per instance (class attr is only a placeholder)


def ConstructTreeConvolution(nodes, numFea, numRecur, numDis, numOut, \
                             Wleft, Wright, Bconstruct, \
                             Wcomb_ae, Wcomb_orig, \
                             Wrecur_root, Wrecur_left, Wrecur_right, Wrecur_sib, Brecur, \
                             Wdis, Woutput, Bdis, Boutput, \
                             poolCutoff
                             ):
    """Build the full layer/connection graph for a tree-structured RNN with
    sibling links over `nodes`, and return the ordered list of layers.

    Stages (each stage appends/rewrites entries of `layers`):
      1. embedding layers for every node;
      2. autoencoder layers for non-leaves (weighted by leaf counts);
      3. combination layers merging autoencoded and original vectors;
      4. recursive layers built top-down (root/left/right/sibling weights);
      5. discriminative hidden layer + softmax output;
      6. successiveLower/successiveUpper links chaining all layers.
    """
    # nodes
    # numFea: # of the word/symbol feature size
    # numCon: # of the convolution size
    # Wleft: left weights of continous binary tree autoencoder
    # Wright: right weights of continous binary tree autoencoder
    # Bconstruct: the biase for the autoencoder
    # Wcomb_ae, Wcomb_orig: the weights for the combination of
    #                       autoencoder and the original vector
    #                       (no biase for this sate)
    # Wconv_root, Wconv_left, Wconv_right, Bconv: the weights for covolution
    # Bconv: Biases for covolution
    numNodes = len(nodes)
    layers = [None] * numNodes
    # construct layers for each node
    # layers = |---leaf---|---non_leaf---|
    # NOTE(review): assumes `nodes` is ordered leaves-first with the root
    # last (queue starts at numNodes - 1) — confirm against the caller.
    numLeaf = 0
    for idx in xrange(numNodes):
        node = nodes[idx]
        if len(node.children) == 0:
            numLeaf += 1
        # Every node (leaf or not) initially gets an embedding layer whose
        # unit indices are the slice [node.bidx, node.bidx + numFea).
        layers[idx] = Lay.layer('vec_' + str(idx) + '_' + node.word, \
                                range(node.bidx, node.bidx + numFea), \
                                numFea
                                )
        layers[idx].act = 'embedding'
    # auto encoding
    # layers = |---leaf---|---non_leaf(autoencoded)---|  (numNodes)
    #          |---non_leaf(original)---|               (numNonLeaf)
    numNonLeaf = numNodes - numLeaf
    layers.extend([None] * (2 * numNonLeaf))
    for idx in xrange(numLeaf, numNodes):
        node = nodes[idx]
        # Keep the original embedding aside; replace slot idx with the
        # autoencoding layer.
        layers[idx + numNonLeaf] = layers[idx]
        tmplayer = Lay.layer('ae_' + str(idx) + '_' + node.word, \
                             Bconstruct, numFea)
        tmplayer.act = 'autoencoding'
        layers[idx] = tmplayer
    # add reconstruction connections
    for idx in xrange(0, numNodes):
        node = nodes[idx]
        if node.parent == None:
            continue
        tmplayer = layers[idx]
        parent = layers[node.parent]
        # Child -> parent autoencoder connections, weighted by the child's
        # left/right rate scaled by its share of the parent's leaves.
        # NOTE(review): in Python 2, `node.leafNum / nodes[...].leafNum` is
        # integer division if leafNum is int — presumably these are floats;
        # verify in the node class.
        if node.leftRate != 0:
            leftcon = Con.connection(tmplayer, parent, \
                                     numFea, numFea, Wleft,
                                     Wcoef=node.leftRate * node.leafNum / nodes[node.parent].leafNum)
        if node.rightRate != 0:
            rightcon = Con.connection(tmplayer, parent, \
                                      numFea, numFea, Wright,
                                      Wcoef=node.rightRate * node.leafNum / nodes[node.parent].leafNum)
    # combinition of the constructed and original value
    # layers = |---leaf---|---non_leaf(combinition)---|      (numNodes)
    #          |---non_leaf(original)---|---non_leaf(ae)---| (2 * numNonLeaf)
    for idx in xrange(numLeaf, numNodes):
        aelayer = layers[idx]
        origlayer = layers[idx + numNonLeaf]
        layers[idx + numNonLeaf * 2] = aelayer
        comlayer = Lay.layer('comb_' + str(idx) + '_' + nodes[idx].word, None, numFea)
        comlayer.act = 'combination'
        layers[idx] = comlayer
        # connecton auto encoded vector and original vector
        con_ae = Con.connection(aelayer, comlayer, numFea, numFea, Wcomb_ae)
        con_orig = Con.connection(origlayer, comlayer, numFea, numFea, Wcomb_orig)
    # CONVOLVE!!! and POOOOL!!!
    # layers = |---leaf---|---non_leaf(combition)---|        => (numNodes)
    #          |---non_leaf(original)---|---non_leaf(ae)---| => (2 * numNonLeaf)
    #          |------------convolution----------|
    # Breadth-first traversal from the root (assumed to be the last node).
    queue = [(numNodes - 1, None)]
    rootChildrenNum = len(nodes[-1].children) - 1  # NOTE(review): computed but never used below
    recurLayers = {}  # the map of recursive layer
    # copy leaf
    for idx in xrange(0, numLeaf):  # leaf ---> recursive leaf: in numFea, out numRecur
        recurLayers[idx] = Lay.layer('Recur_' + str(idx) + '_' + nodes[idx].word, \
                                     Brecur, numRecur)
        Con.connection(layers[idx], recurLayers[idx], numFea, numRecur, Wrecur_root)
    while True:
        curLen = len(queue)
        # layerCnt.append( curLen )
        if curLen == 0:
            break
        nextQueue = []
        # NOTE(review): the loop variable `info` shadows the `info` class above.
        for (nodeidx, info) in queue:
            curLayer = layers[nodeidx]
            curNode = nodes[nodeidx]
            childNum = len(curNode.children)
            if childNum == 0:  # leaf node
                # NOTE(review): reassigning `queue` mid-iteration looks
                # suspicious (probably meant only `continue`) — preserved as-is.
                queue = nextQueue
                continue
            # create recursive node
            if nodeidx not in recurLayers.keys():
                recurLayer = Lay.layer('Recur_' + str(nodeidx) + '_' + curNode.word, \
                                       Brecur, numRecur)
                recurLayer.act = 'recursive'
                # layers.append(recurLayer)
                recurLayers[nodeidx] = recurLayer
            recurLayer = recurLayers[nodeidx]
            # add root connection from Combination layer
            rootCon = Con.connection(curLayer, recurLayer, numFea, numRecur, Wrecur_root)
            # add connection from one previous sibling (the nearest earlier one)
            sibs_idx = curNode.siblings
            sibs_idx = [i for i in sibs_idx if i < nodeidx]
            if len(sibs_idx) > 0:
                sibs_idx.sort(reverse=True)
                sib_idx = sibs_idx[0]
                sibNode = nodes[sib_idx]
                if sib_idx not in recurLayers.keys():
                    sibLayer = Lay.layer('Recur_' + str(sib_idx) + '_' + sibNode.word, \
                                         Brecur, numRecur)
                    recurLayers[sib_idx] = sibLayer
                sibLayer = recurLayers[sib_idx]
                sib_childrenNum = len(sibNode.children)
                if sib_childrenNum == 0:
                    sib_childrenNum = 1  # leaves still count once
                # Sibling weight: sibling's child count over total siblings.
                sib_Weight = 1.0 * sib_childrenNum / len(curNode.siblings)
                sibCon = Con.connection(sibLayer, recurLayer, \
                                        numRecur, numRecur, Wrecur_sib, sib_Weight)
            # for each child of the current node
            for child in curNode.children:
                childNode = nodes[child]
                if child not in recurLayers.keys():
                    # NOTE(review): created with size numFea here, while every
                    # other recursive layer uses numRecur — looks like a bug;
                    # preserved as-is, verify against Lay.layer semantics.
                    childLayer = Lay.layer('Recur_' + str(child) + '_' + childNode.word, \
                                           Brecur, numFea)
                    # layers.append(childLayer)
                    recurLayers[child] = childLayer
                childLayer = recurLayers[child]
                nextQueue.append((child, ''))  # add to child
                # Continuous-binary-tree interpolation: position within the
                # children determines the left/right weight split.
                if childNum == 1:
                    leftWeight = .5
                    rightWeight = .5
                else:
                    rightWeight = childNode.pos / (childNum - 1.0)
                    leftWeight = 1 - rightWeight
                if leftWeight != 0:
                    leftCon = Con.connection(childLayer, recurLayer, \
                                             numRecur, numRecur, Wrecur_left, leftWeight)
                if rightWeight != 0:
                    rightCon = Con.connection(childLayer, recurLayer, \
                                              numRecur, numRecur, Wrecur_right, rightWeight)
            # end of each child of the current node
        queue = nextQueue
        # end of current layer
    # add recursive layer
    for idx in xrange(0, numNodes):
        layers.append(recurLayers[idx])
    # reorder
    # layers = |---leaf---|---non_leaf(ae)---|               => (numNodes)
    #          |---non_leaf(original)---|---non_leaf(comb)---| => (2 * numNonLeaf)
    #          |------------convolution----------|
    for idx in xrange(numLeaf, numLeaf + numNonLeaf):
        # Swap the combination layers with the autoencoded copies.
        tmp = layers[idx]
        layers[idx] = layers[idx + 2 * numNonLeaf]
        layers[idx + 2 * numNonLeaf] = tmp
    # discriminative layer
    # layers = |---leaf---|---non_leaf(ae)---|               => (numNodes)
    #          |---non_leaf(original)---|---non_leaf(comb)---| => (2 * numNonLeaf)
    #          |------------recursive----------|
    #          |---discriminative layer-----|
    #          |--output--|
    lenlayer = len(layers)
    rootRecur = recurLayers[numNodes - 1]
    discriminative = Lay.layer('discriminative', Bdis, numDis)
    discriminative.act = 'hidden'
    output = Lay.layer('outputlayer', Boutput, numOut)
    output.act = 'softmax'
    # One Weight Size
    # NOTE(review): input size is given as numFea although rootRecur was
    # built with numRecur units — possible bug; preserved as-is.
    con = Con.connection(rootRecur, discriminative, numFea, numDis, Wdis)
    outcon = Con.connection(discriminative, output, numDis, numOut, Woutput)
    if numOut > 1:
        output._activate = Activation.softmax
        output._activatePrime = None
    layers.append(discriminative)
    layers.append(output)
    # add successive connections: doubly-link layers in evaluation order
    numlayers = len(layers)
    for idx in xrange(numlayers):
        if idx > 0:
            layers[idx].successiveLower = layers[idx - 1]
        if idx < numlayers - 1:
            layers[idx].successiveUpper = layers[idx + 1]
    return layers
# jjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjj
# NOTE(review): the dead-code string below was truncated by the extraction;
# the closing quotes are supplied here so the file stays parseable.
'''
    tmplayer = Lay.layer('ae_'+str(idx)+'_'+node.word,\
                Bconstruct[0], numFea)
    layers[idx] = tmplayer
    conLayer = Lay.layer('Convolve_' + curLayer.name, \
                Bconv[0], numCon)
    conLayer.act = 'convolution'
    discriminative = Lay.layer( 'discriminative', Bdis[0], numDis)
    discriminative.act = 'hidden'
    output = Lay.layer('outputlayer', Boutput[0], numOut)
    output.act = 'softmax'
'''
longest_subsequence.py
Source:longest_subsequence.py
# longest_subsequence.py — reconstructed from a garbled extraction.
# NOTE(review): the `def greatest_common_subsequence(s1, s2):` header and the
# start of its docstring were truncated in the extraction; reconstructed from
# the visible body.

def greatest_common_subsequence(s1, s2):
    """Plain (exponential) recursive LCS length.

    :param s1: String 1
    :param s2: String 2
    :return: Length of the Greatest Common Subsequence
    """

    def len_recur(i, j):
        # Base case: one of the strings is exhausted.
        if i < 0 or j < 0:
            return 0
        if s1[i] == s2[j]:
            length = 1 + len_recur(i - 1, j - 1)
        else:
            length = max(len_recur(i, j - 1),
                         len_recur(i - 1, j))
        return length

    return len_recur(len(s1) - 1, len(s2) - 1)


def greatest_common_subsequence_memoized(s1, s2):
    """Memoized recursive LCS length.

    :param s1: String 1
    :param s2: String 2
    :return: Length of the Greatest Common Subsequence
    """
    # BUG FIX: the original used `[[-1] * len(s1)] * len(s2)`, which
    # (a) aliases every row to the *same* list, so one memo write corrupts
    # a whole column across all rows, and (b) has the dimensions transposed
    # (the table is indexed [i][j] with i ranging over s1), raising
    # IndexError whenever len(s1) > len(s2). Build independent rows with
    # the correct shape instead.
    lookup = [[-1] * len(s2) for _ in range(len(s1))]

    def len_recur(i, j):
        if i < 0 or j < 0:
            return 0
        length = lookup[i][j]
        if length != -1:
            return length  # memo hit
        elif s1[i] == s2[j]:
            length = 1 + len_recur(i - 1, j - 1)
        else:
            length = max(len_recur(i, j - 1),
                         len_recur(i - 1, j))
        lookup[i][j] = length
        return length

    return len_recur(len(s1) - 1, len(s2) - 1)


def greatest_mismatching_subsequence_memoized(s1, s2):
    """Memoized recursive length of the longest *mismatching* subsequence
    (same recurrence as LCS but counting positions where the characters
    differ).

    :param s1: String 1
    :param s2: String 2
    :return: Length of the Greatest Mismatching Subsequence
    """
    # Same row-aliasing / transposed-dimension fix as in
    # greatest_common_subsequence_memoized above.
    lookup = [[-1] * len(s2) for _ in range(len(s1))]

    def len_recur(i, j):
        if i < 0 or j < 0:
            return 0
        length = lookup[i][j]
        if length != -1:
            return length
        elif s1[i] != s2[j]:
            length = 1 + len_recur(i - 1, j - 1)
        else:
            length = max(len_recur(i, j - 1),
                         len_recur(i - 1, j))
        lookup[i][j] = length
        return length

    return len_recur(len(s1) - 1, len(s2) - 1)


# Last lookup table produced by greatest_common_subsequence_path_memoized,
# exposed for inspection by the __main__ demo.
L = []


def greatest_common_subsequence_path_memoized(s1, s2):
    """LCS length that also records, in `whereto`, which neighboring cell
    each memo entry was derived from (the traceback path).

    :return: Length of the Greatest Common Subsequence; as a side effect the
             module-level `L` is set to the memo table.
    """
    # BUG FIX: independent rows instead of the `[row] * n` aliasing bug.
    lookup = [[-1] * (len(s2) + 1) for _ in range(len(s1) + 1)]
    whereto = [[None] * (len(s2) + 1) for _ in range(len(s1) + 1)]

    def len_recur(i, j):
        if i < 0 or j < 0:
            return 0
        length = lookup[i][j]
        if length != -1:
            return length
        elif s1[i] == s2[j]:
            length = 1 + len_recur(i - 1, j - 1)
            r, s = i - 1, j - 1
        else:
            length = max(len_recur(i, j - 1),
                         len_recur(i - 1, j))
            if length == len_recur(i, j - 1):  # memoized, so this is cheap
                r, s = i, j - 1
            else:
                r, s = i - 1, j
        lookup[i][j] = length
        whereto[i][j] = [r, s]
        return length

    def print_path(i, j):
        # Walk the whereto chain back to the origin, printing matched chars.
        if i > 0 and j > 0:
            [r, s] = whereto[i][j]
            print_path(r, s)
            if r == i - 1 and s == j - 1:
                print(s1[i], end='')

    try:
        return len_recur(len(s1) - 1, len(s2) - 1)
    finally:
        global L
        L = lookup
        # print(lookup)
        # print_path(len(s1) - 1, len(s2) - 1)
        # print()


def build_string(A, B, Look):
    """Reconstruct a common subsequence of A and B from a filled LCS memo
    table `Look` (Look[i][j] = LCS length of A[:i+1], B[:j+1]).

    NOTE(review): a memoized table may contain unvisited (-1) cells, in which
    case the reconstruction is unreliable — intended for fully filled tables.
    """
    def recur(i, j):
        if i < 0 or j < 0:
            return ''
        if A[i] == B[j]:
            # BUG FIX: the original recursed on (i, j) here, which never
            # terminates; a matched character consumes one index from each
            # string. (A stray debug print was also removed.)
            return recur(i - 1, j - 1) + A[i]
        elif i != 0 and Look[i][j] == Look[i - 1][j]:
            return recur(i - 1, j)
        else:
            return recur(i, j - 1)

    return recur(len(A) - 1, len(B) - 1)


if __name__ == '__main__':
    A = 'bcaax'
    B = 'bcda'
    print(greatest_common_subsequence_path_memoized(A, B))
    print(L)
...
loop_to_recur.py
Source:loop_to_recur.py
# loop_to_recur.py — reconstructed from a garbled extraction.
# NOTE(review): the `def multiply(n):` header and its accumulator initializer
# were truncated in the extraction; reconstructed from the visible loop body.

def multiply(n):
    """Iterative factorial-style product: n * (n-1) * ... * 2."""
    acc = 1
    for i in range(n, 1, -1):
        acc *= i
    return acc

# print(multiply(5))


def multiply_recur(n, acc=1):
    """Tail-recursive equivalent of multiply(), threading the running
    product through the `acc` accumulator."""
    if n == 0:
        return acc
    return multiply_recur(n - 1, acc * n)

# print(multiply_recur(5))
# def recur_multiply(n, acc = 1):
#     if n < 0:
#         return 0
#     j = 0
#     while j < n:
#         print(acc, (n*j))
#         acc += (n*j)
#         j += 1
#     return acc + recur_multiply(n-1)
# print(recur_multiply(5))
# nested_loops
# acc = 1
# for i = 100; i > 0; i--:
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.
You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.
Get 100 minutes of automation testing for FREE!